gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "integrate.h"
36 #include "varray.h"
37 #include "hashtab.h"
38 #include "splay-tree.h"
39 #include "langhooks.h"
40 #include "cgraph.h"
41 #include "intl.h"
42 #include "tree-mudflap.h"
43 #include "function.h"
44 #include "diagnostic.h"
46 /* I'm not real happy about this, but we need to handle gimple and
47 non-gimple trees. */
48 #include "tree-iterator.h"
49 #include "tree-gimple.h"
51 /* 0 if we should not perform inlining.
52 1 if we should expand functions calls inline at the tree level.
53 2 if we should consider *all* functions to be inline
54 candidates. */
56 int flag_inline_trees = 0;
58 /* To Do:
60 o In order to make inlining-on-trees work, we pessimized
61 function-local static constants. In particular, they are now
62 always output, even when not addressed. Fix this by treating
63 function-local static constants just like global static
64 constants; the back-end already knows not to output them if they
65 are not needed.
67 o Provide heuristics to clamp inlining of recursive template
68 calls? */
70 /* Data required for function inlining. */
72 typedef struct inline_data
74 /* A stack of the functions we are inlining. For example, if we are
75 compiling `f', which calls `g', which calls `h', and we are
76 inlining the body of `h', the stack will contain `h', followed
77 by `g', followed by `f'. The first few elements of the stack may
78 contain other functions that we know we should not recurse into,
79 even though they are not directly being inlined. */
80 varray_type fns;
81 /* The index of the first element of FNS that really represents an
82 inlined function. */
83 unsigned first_inlined_fn;
84 /* The label to jump to when a return statement is encountered. If
85 this value is NULL, then return statements will simply be
86 remapped as return statements, rather than as jumps. */
87 tree ret_label;
88 /* The VAR_DECL for the return value. */
89 tree retvar;
90 /* The map from local declarations in the inlined function to
91 equivalents in the function into which it is being inlined. */
92 splay_tree decl_map;
93 /* Nonzero if we are currently within the cleanup for a
94 TARGET_EXPR. */
95 int in_target_cleanup_p;
96 /* A list of the functions the current function has inlined. */
97 varray_type inlined_fns;
98 /* We use the same mechanism to build clones that we do to perform
99 inlining. However, there are a few places where we need to
100 distinguish between those two situations. This flag is true if
101 we are cloning, rather than inlining. */
102 bool cloning_p;
103 /* Similarly for saving function body. */
104 bool saving_p;
105 /* Hash table used to prevent walk_tree from visiting the same node
106 umpteen million times. */
107 htab_t tree_pruner;
108 /* Callgraph node of function we are inlining into. */
109 struct cgraph_node *node;
110 /* Callgraph node of currently inlined function. */
111 struct cgraph_node *current_node;
112 /* Statement iterator. We need this so we can keep the tree in
113 gimple form when we insert the inlined function. It is not
114 used when we are not dealing with gimple trees. */
115 tree_stmt_iterator tsi;
116 } inline_data;
118 /* Prototypes. */
120 /* The approximate number of instructions per statement. This number
121 need not be particularly accurate; it is used only to make
122 decisions about when a function is too big to inline. */
123 #define INSNS_PER_STMT (10)
125 static tree copy_body_r (tree *, int *, void *);
126 static tree copy_body (inline_data *);
127 static tree expand_call_inline (tree *, int *, void *);
128 static void expand_calls_inline (tree *, inline_data *);
129 static bool inlinable_function_p (tree);
130 static tree remap_decl (tree, inline_data *);
131 static tree remap_type (tree, inline_data *);
132 static tree initialize_inlined_parameters (inline_data *, tree,
133 tree, tree, tree);
134 static void remap_block (tree *, inline_data *);
135 static tree remap_decls (tree, inline_data *);
136 static void copy_bind_expr (tree *, int *, inline_data *);
137 static tree mark_local_for_remap_r (tree *, int *, void *);
138 static tree unsave_r (tree *, int *, void *);
139 static void declare_inline_vars (tree bind_expr, tree vars);
141 /* Insert a tree->tree mapping for ID. Although the name suggests that
142 the trees should be variables, this map is used for more than that. */
144 static void
145 insert_decl_map (inline_data *id, tree key, tree value)
147 splay_tree_insert (id->decl_map, (splay_tree_key) key,
148 (splay_tree_value) value);
150 /* Always insert an identity map as well. If we see this same new
151 node again, we won't want to duplicate it a second time. */
152 if (key != value)
153 splay_tree_insert (id->decl_map, (splay_tree_key) value,
154 (splay_tree_value) value);
157 /* Remap DECL during the copying of the BLOCK tree for the function.
158 We are only called to remap local variables in the current function. */
160 static tree
161 remap_decl (tree decl, inline_data *id)
163 splay_tree_node n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
164 tree fn = VARRAY_TOP_TREE (id->fns);
166 /* See if we have remapped this declaration. If we didn't already have an
167 equivalent for this declaration, create one now. */
168 if (!n)
170 /* Make a copy of the variable or label. */
171 tree t = copy_decl_for_inlining (decl, fn, VARRAY_TREE (id->fns, 0));
173 /* Remap types, if necessary. */
174 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
175 if (TREE_CODE (t) == TYPE_DECL)
176 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
177 else if (TREE_CODE (t) == PARM_DECL)
178 DECL_ARG_TYPE_AS_WRITTEN (t)
179 = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);
181 /* Remap sizes as necessary. */
182 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
183 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
185 /* If this is a FIELD_DECL, do likewise for the offset and qualifier. */
186 if (TREE_CODE (t) == FIELD_DECL)
188 walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
189 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
190 walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
193 #if 0
194 /* FIXME handle anon aggrs. */
195 if (! DECL_NAME (t) && TREE_TYPE (t)
196 && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
198 /* For a VAR_DECL of anonymous type, we must also copy the
199 member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS. */
200 tree members = NULL;
201 tree src;
203 for (src = DECL_ANON_UNION_ELEMS (t); src;
204 src = TREE_CHAIN (src))
206 tree member = remap_decl (TREE_VALUE (src), id);
208 if (TREE_PURPOSE (src))
209 abort ();
210 members = tree_cons (NULL, member, members);
212 DECL_ANON_UNION_ELEMS (t) = nreverse (members);
214 #endif
216 /* Remember it, so that if we encounter this local entity
217 again we can reuse this copy. */
218 insert_decl_map (id, decl, t);
219 return t;
222 return unshare_expr ((tree) n->value);
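/* Remap TYPE in the context described by ID. Types that are not variably
   modified by a local of the function being inlined are returned unchanged;
   other types are copied and registered in the decl map. */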
225 static tree
226 remap_type (tree type, inline_data *id)
228 splay_tree_node node;
229 tree new, t;
231 if (type == NULL)
232 return type;
234 /* See if we have remapped this type. */
235 node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
236 if (node)
237 return (tree) node->value;
239 /* The type only needs remapping if it's variably modified by a variable
240 in the function we are inlining. */
241 if (! variably_modified_type_p (type, VARRAY_TOP_TREE (id->fns)))
243 insert_decl_map (id, type, type);
244 return type;
247 /* We do need a copy. Build and register it now. If this is a pointer or
248 reference type, remap the designated type and make a new pointer or
249 reference type. */
250 if (TREE_CODE (type) == POINTER_TYPE)
252 new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
253 TYPE_MODE (type),
254 TYPE_REF_CAN_ALIAS_ALL (type));
255 insert_decl_map (id, type, new);
256 return new;
258 else if (TREE_CODE (type) == REFERENCE_TYPE)
260 new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
261 TYPE_MODE (type),
262 TYPE_REF_CAN_ALIAS_ALL (type));
263 insert_decl_map (id, type, new);
264 return new;
266 else
267 new = copy_node (type);
269 insert_decl_map (id, type, new);
271 /* This is a new type, not a copy of an old type. Need to reassociate
272 variants. We can handle everything except the main variant lazily. */
273 t = TYPE_MAIN_VARIANT (type);
274 if (type != t)
276 t = remap_type (t, id);
277 TYPE_MAIN_VARIANT (new) = t;
278 TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
279 TYPE_NEXT_VARIANT (t) = new;
281 else
283 TYPE_MAIN_VARIANT (new) = new;
284 TYPE_NEXT_VARIANT (new) = NULL;
287 /* Lazily create pointer and reference types. */
288 TYPE_POINTER_TO (new) = NULL;
289 TYPE_REFERENCE_TO (new) = NULL;
291 switch (TREE_CODE (new))
293 case INTEGER_TYPE:
294 case REAL_TYPE:
295 case ENUMERAL_TYPE:
296 case BOOLEAN_TYPE:
297 case CHAR_TYPE:
298 t = TYPE_MIN_VALUE (new);
299 if (t && TREE_CODE (t) != INTEGER_CST)
300 walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
302 t = TYPE_MAX_VALUE (new);
303 if (t && TREE_CODE (t) != INTEGER_CST)
304 walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
305 return new;
307 case FUNCTION_TYPE:
308 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
309 walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
310 return new;
312 case ARRAY_TYPE:
313 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
314 TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
315 break;
317 case RECORD_TYPE:
318 case UNION_TYPE:
319 case QUAL_UNION_TYPE:
320 walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
321 break;
323 case FILE_TYPE:
324 case SET_TYPE:
325 case OFFSET_TYPE:
326 default:
327 /* Shouldn't have been thought variable sized. */
328 abort ();
331 walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
332 walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
334 return new;
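/* Remap the chain of declarations DECLS for the context described by ID,
   returning a chain of their remapped equivalents. Declarations remapped to
   the return slot are omitted, since they are declared elsewhere. */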
337 static tree
338 remap_decls (tree decls, inline_data *id)
340 tree old_var;
341 tree new_decls = NULL_TREE;
343 /* Remap its variables. */
344 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
346 tree new_var;
348 /* Remap the variable. */
349 new_var = remap_decl (old_var, id);
351 /* If we didn't remap this variable, we can't mess with its
352 TREE_CHAIN. If we remapped this variable to the return slot, it's
353 already declared somewhere else, so don't declare it here. */
354 if (!new_var || new_var == id->retvar)
356 #ifdef ENABLE_CHECKING
357 else if (!DECL_P (new_var))
358 abort ();
359 #endif
360 else
362 TREE_CHAIN (new_var) = new_decls;
363 new_decls = new_var;
367 return nreverse (new_decls);
370 /* Copy the BLOCK to contain remapped versions of the variables
371 therein. And hook the new block into the block-tree. */
373 static void
374 remap_block (tree *block, inline_data *id)
376 tree old_block;
377 tree new_block;
378 tree fn;
380 /* Make the new block. */
381 old_block = *block;
382 new_block = make_node (BLOCK);
383 TREE_USED (new_block) = TREE_USED (old_block);
384 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
385 *block = new_block;
387 /* Remap its variables. */
388 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
390 fn = VARRAY_TREE (id->fns, 0);
391 #if 1
392 /* FIXME! It shouldn't be so hard to manage blocks. Rebuilding them in
393 rest_of_compilation is a good start. */
394 if (id->cloning_p)
395 /* We're building a clone; DECL_INITIAL is still
396 error_mark_node, and current_binding_level is the parm
397 binding level. */
398 lang_hooks.decls.insert_block (new_block);
399 else
401 /* Attach this new block after the DECL_INITIAL block for the
402 function into which this block is being inlined. In
403 rest_of_compilation we will straighten out the BLOCK tree. */
404 tree *first_block;
405 if (DECL_INITIAL (fn))
406 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
407 else
408 first_block = &DECL_INITIAL (fn);
409 BLOCK_CHAIN (new_block) = *first_block;
410 *first_block = new_block;
412 #endif
413 /* Remember the remapped block. */
414 insert_decl_map (id, old_block, new_block);
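/* Replace the STATEMENT_LIST at *TP with a freshly allocated list, chaining
   the original statements into it; the statements themselves are copied when
   the tree walk descends into them. */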
417 static void
418 copy_statement_list (tree *tp)
420 tree_stmt_iterator oi, ni;
421 tree new;
423 new = alloc_stmt_list ();
424 ni = tsi_start (new);
425 oi = tsi_start (*tp);
426 *tp = new;
428 for (; !tsi_end_p (oi); tsi_next (&oi))
429 tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
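/* Copy the BIND_EXPR at *TP, remapping its BLOCK and the variables it
   declares. */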
432 static void
433 copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
435 tree block = BIND_EXPR_BLOCK (*tp);
436 /* Copy (and replace) the statement. */
437 copy_tree_r (tp, walk_subtrees, NULL);
438 if (block)
440 remap_block (&block, id);
441 BIND_EXPR_BLOCK (*tp) = block;
444 if (BIND_EXPR_VARS (*tp))
445 /* This will remap a lot of the same decls again, but this should be
446 harmless. */
447 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
450 /* Called from copy_body via walk_tree. DATA is really an `inline_data *'. */
452 static tree
453 copy_body_r (tree *tp, int *walk_subtrees, void *data)
455 inline_data *id = (inline_data *) data;
456 tree fn = VARRAY_TOP_TREE (id->fns);
458 #if 0
459 /* All automatic variables should have a DECL_CONTEXT indicating
460 what function they come from. */
461 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
462 && DECL_NAMESPACE_SCOPE_P (*tp))
463 if (! DECL_EXTERNAL (*tp) && ! TREE_STATIC (*tp))
464 abort ();
465 #endif
467 /* If this is a RETURN_EXPR, change it into a MODIFY_EXPR and a
468 GOTO_EXPR with the RET_LABEL as its target. */
469 if (TREE_CODE (*tp) == RETURN_EXPR && id->ret_label)
471 tree return_stmt = *tp;
472 tree goto_stmt;
474 /* Build the GOTO_EXPR. */
475 tree assignment = TREE_OPERAND (return_stmt, 0);
476 goto_stmt = build1 (GOTO_EXPR, void_type_node, id->ret_label);
477 TREE_USED (id->ret_label) = 1;
479 /* If we're returning something, just turn that into an
480 assignment into the equivalent of the original
481 RESULT_DECL. */
482 if (assignment)
484 /* Do not create a statement containing a naked RESULT_DECL. */
485 if (TREE_CODE (assignment) == RESULT_DECL)
486 gimplify_stmt (&assignment);
488 *tp = build (BIND_EXPR, void_type_node, NULL, NULL, NULL);
489 append_to_statement_list (assignment, &BIND_EXPR_BODY (*tp));
490 append_to_statement_list (goto_stmt, &BIND_EXPR_BODY (*tp));
492 /* If we're not returning anything just do the jump. */
493 else
494 *tp = goto_stmt;
496 /* Local variables and labels need to be replaced by equivalent
497 variables. We don't want to copy static variables; there's only
498 one of those, no matter how many times we inline the containing
499 function. Similarly for globals from an outer function. */
500 else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
502 tree new_decl;
504 /* Remap the declaration. */
505 new_decl = remap_decl (*tp, id);
506 if (! new_decl)
507 abort ();
508 /* Replace this variable with the copy. */
509 STRIP_TYPE_NOPS (new_decl);
510 *tp = new_decl;
512 #if 0
513 else if (nonstatic_local_decl_p (*tp)
514 && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
515 abort ();
516 #endif
517 else if (TREE_CODE (*tp) == STATEMENT_LIST)
518 copy_statement_list (tp);
519 else if (TREE_CODE (*tp) == SAVE_EXPR)
520 remap_save_expr (tp, id->decl_map, walk_subtrees);
521 else if (TREE_CODE (*tp) == BIND_EXPR)
522 copy_bind_expr (tp, walk_subtrees, id);
523 else if (TREE_CODE (*tp) == LABELED_BLOCK_EXPR)
525 /* We need a new copy of this labeled block; the EXIT_BLOCK_EXPR
526 will refer to it, so save a copy ready for remapping. We
527 save it in the decl_map, although it isn't a decl. */
528 tree new_block = copy_node (*tp);
529 insert_decl_map (id, *tp, new_block);
530 *tp = new_block;
532 else if (TREE_CODE (*tp) == EXIT_BLOCK_EXPR)
534 splay_tree_node n
535 = splay_tree_lookup (id->decl_map,
536 (splay_tree_key) TREE_OPERAND (*tp, 0));
537 /* We _must_ have seen the enclosing LABELED_BLOCK_EXPR. */
538 if (! n)
539 abort ();
540 *tp = copy_node (*tp);
541 TREE_OPERAND (*tp, 0) = (tree) n->value;
543 /* Types may need remapping as well. */
544 else if (TYPE_P (*tp))
545 *tp = remap_type (*tp, id);
547 /* Otherwise, just copy the node. Note that copy_tree_r already
548 knows not to copy VAR_DECLs, etc., so this is safe. */
549 else
551 tree old_node = *tp;
553 if (TREE_CODE (*tp) == MODIFY_EXPR
554 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
555 && (lang_hooks.tree_inlining.auto_var_in_fn_p
556 (TREE_OPERAND (*tp, 0), fn)))
558 /* Some assignments VAR = VAR; don't generate any rtl code
559 and thus don't count as variable modification. Avoid
560 keeping bogosities like 0 = 0. */
561 tree decl = TREE_OPERAND (*tp, 0), value;
562 splay_tree_node n;
564 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
565 if (n)
567 value = (tree) n->value;
568 STRIP_TYPE_NOPS (value);
569 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
571 *tp = value;
572 return copy_body_r (tp, walk_subtrees, data);
576 else if (TREE_CODE (*tp) == INDIRECT_REF)
578 /* Get rid of *& from inline substitutions that can happen when a
579 pointer argument is an ADDR_EXPR. */
580 tree decl = TREE_OPERAND (*tp, 0), value;
581 splay_tree_node n;
583 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
584 if (n)
586 value = (tree) n->value;
587 STRIP_NOPS (value);
588 if (TREE_CODE (value) == ADDR_EXPR
589 && (lang_hooks.types_compatible_p
590 (TREE_TYPE (*tp), TREE_TYPE (TREE_OPERAND (value, 0)))))
592 *tp = TREE_OPERAND (value, 0);
593 return copy_body_r (tp, walk_subtrees, data);
598 copy_tree_r (tp, walk_subtrees, NULL);
600 if (TREE_CODE (*tp) == CALL_EXPR && id->node && get_callee_fndecl (*tp))
602 if (id->saving_p)
604 struct cgraph_node *node;
605 struct cgraph_edge *edge;
607 for (node = id->node->next_clone; node; node = node->next_clone)
609 edge = cgraph_edge (node, old_node);
610 if (edge)
611 edge->call_expr = *tp;
612 else
613 abort ();
616 else
618 struct cgraph_edge *edge
619 = cgraph_edge (id->current_node, old_node);
621 if (edge)
622 cgraph_clone_edge (edge, id->node, *tp);
626 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
628 /* The copied TARGET_EXPR has never been expanded, even if the
629 original node was expanded already. */
630 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
632 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
633 TREE_OPERAND (*tp, 3) = NULL_TREE;
637 /* Keep iterating. */
638 return NULL_TREE;
641 /* Make a copy of the body of FN so that it can be inserted inline in
642 another function. */
644 static tree
645 copy_body (inline_data *id)
647 tree body;
648 tree fndecl = VARRAY_TOP_TREE (id->fns);
650 if (fndecl == current_function_decl
651 && cfun->saved_tree)
652 body = cfun->saved_tree;
653 else
654 body = DECL_SAVED_TREE (fndecl);
655 walk_tree (&body, copy_body_r, id, NULL);
657 return body;
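/* Set up the mapping for a single parameter P of FN to the actual argument
   VALUE supplied by the caller. Initialization statements, if any are
   needed, are appended to *INIT_STMTS, new VAR_DECLs are chained onto *VARS,
   and *GIMPLIFY_INIT_STMTS_P is set if the initializers require
   gimplification. */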
660 static void
661 setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
662 tree *init_stmts, tree *vars, bool *gimplify_init_stmts_p)
664 tree init_stmt;
665 tree var;
667 /* If the parameter is never assigned to, we may not need to
668 create a new variable here at all. Instead, we may be able
669 to just use the argument value. */
670 if (TREE_READONLY (p)
671 && !TREE_ADDRESSABLE (p)
672 && value && !TREE_SIDE_EFFECTS (value))
674 /* We can't risk substituting complex expressions. They
675 might contain variables that will be assigned to later.
676 Theoretically, we could check the expression to see if
677 all of the variables that determine its value are
678 read-only, but we don't bother. */
679 /* We may produce non-gimple trees by adding NOPs or introduce
680 invalid sharing when the operand is not really constant.
681 It is not a big deal to prohibit constant propagation here, as
682 we will constant propagate in the DOM1 pass anyway. */
683 if (is_gimple_min_invariant (value)
684 && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p)))
686 insert_decl_map (id, p, value);
687 return;
691 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
692 here since the type of this decl must be visible to the calling
693 function. */
694 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
696 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
697 that way, when the PARM_DECL is encountered, it will be
698 automatically replaced by the VAR_DECL. */
699 insert_decl_map (id, p, var);
701 /* Declare this new variable. */
702 TREE_CHAIN (var) = *vars;
703 *vars = var;
705 /* Make gimplifier happy about this variable. */
706 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
708 /* Even if P was TREE_READONLY, the new VAR should not be.
709 In the original code, we would have constructed a
710 temporary, and then the function body would have never
711 changed the value of P. However, now, we will be
712 constructing VAR directly. The constructor body may
713 change its value multiple times as it is being
714 constructed. Therefore, it must not be TREE_READONLY;
715 the back-end assumes that a TREE_READONLY variable is
716 assigned to only once. */
717 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
718 TREE_READONLY (var) = 0;
720 /* Initialize this VAR_DECL from the equivalent argument. Convert
721 the argument to the proper type in case it was promoted. */
722 if (value)
724 tree rhs = fold_convert (TREE_TYPE (var), value);
726 if (rhs == error_mark_node)
727 return;
729 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
730 keep our trees in gimple form. */
731 init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
732 append_to_statement_list (init_stmt, init_stmts);
734 /* If we did not create a gimple value and we did not create a gimple
735 cast of a gimple value, then we will need to gimplify INIT_STMTS
736 at the end. Note that is_gimple_cast only checks the outer
737 tree code, not its operand. Thus the explicit check that its
738 operand is a gimple value. */
739 if (!is_gimple_val (rhs)
740 && (!is_gimple_cast (rhs)
741 || !is_gimple_val (TREE_OPERAND (rhs, 0))))
742 *gimplify_init_stmts_p = true;
746 /* Generate code to initialize the parameters of the function at the
747 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
749 static tree
750 initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
751 tree fn, tree bind_expr)
753 tree init_stmts = NULL_TREE;
754 tree parms;
755 tree a;
756 tree p;
757 tree vars = NULL_TREE;
758 bool gimplify_init_stmts_p = false;
759 int argnum = 0;
761 /* Figure out what the parameters are. */
762 parms = DECL_ARGUMENTS (fn);
763 if (fn == current_function_decl)
764 parms = cfun->saved_args;
766 /* Loop through the parameter declarations, replacing each with an
767 equivalent VAR_DECL, appropriately initialized. */
768 for (p = parms, a = args; p;
769 a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
771 tree value;
773 ++argnum;
775 /* Find the initializer. */
776 value = lang_hooks.tree_inlining.convert_parm_for_inlining
777 (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);
779 setup_one_parameter (id, p, value, fn, &init_stmts, &vars,
780 &gimplify_init_stmts_p);
783 /* Evaluate trailing arguments. */
784 for (; a; a = TREE_CHAIN (a))
786 tree value = TREE_VALUE (a);
787 append_to_statement_list (value, &init_stmts);
790 /* Initialize the static chain. */
791 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
792 if (p)
794 /* No static chain? Seems like a bug in tree-nested.c. */
795 if (!static_chain)
796 abort ();
798 setup_one_parameter (id, p, static_chain, fn, &init_stmts, &vars,
799 &gimplify_init_stmts_p);
802 if (gimplify_init_stmts_p)
803 gimplify_body (&init_stmts, current_function_decl);
805 declare_inline_vars (bind_expr, vars);
806 return init_stmts;
809 /* Declare a return variable to replace the RESULT_DECL for the function we
810 are calling. RETURN_SLOT_ADDR, if non-null, was a fake parameter that
811 took the address of the result. MODIFY_DEST, if non-null, was the LHS of
812 the MODIFY_EXPR to which this call is the RHS.
814 The return value is a (possibly null) value that is the result of the
815 function as seen by the callee. *USE_P is a (possibly null) value that
816 holds the result as seen by the caller. */
818 static tree
819 declare_return_variable (inline_data *id, tree return_slot_addr,
820 tree modify_dest, tree *use_p)
822 tree callee = VARRAY_TOP_TREE (id->fns);
823 tree caller = VARRAY_TREE (id->fns, 0);
824 tree result = DECL_RESULT (callee);
825 tree callee_type = TREE_TYPE (result);
826 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
827 tree var, use;
829 /* We don't need to do anything for functions that don't return
830 anything. */
831 if (!result || VOID_TYPE_P (callee_type))
833 *use_p = NULL_TREE;
834 return NULL_TREE;
837 /* If there was a return slot, then the return value is the
838 dereferenced address of that object. */
839 if (return_slot_addr)
841 /* The front end shouldn't have used both return_slot_addr and
842 a modify expression. */
843 if (modify_dest)
844 abort ();
845 if (DECL_BY_REFERENCE (result))
846 var = return_slot_addr;
847 else
848 var = build_fold_indirect_ref (return_slot_addr);
849 use = NULL;
850 goto done;
853 /* All types requiring non-trivial constructors should have been handled. */
854 if (TREE_ADDRESSABLE (callee_type))
855 abort ();
857 /* Attempt to avoid creating a new temporary variable. */
858 if (modify_dest)
860 bool use_it = false;
862 /* We can't use MODIFY_DEST if there's type promotion involved. */
863 if (!lang_hooks.types_compatible_p (caller_type, callee_type))
864 use_it = false;
866 /* ??? If we're assigning to a variable sized type, then we must
867 reuse the destination variable, because we've no good way to
868 create variable sized temporaries at this point. */
869 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
870 use_it = true;
872 /* If the callee cannot possibly modify MODIFY_DEST, then we can
873 reuse it as the result of the call directly. Don't do this if
874 it would promote MODIFY_DEST to addressable. */
875 else if (!TREE_STATIC (modify_dest)
876 && !TREE_ADDRESSABLE (modify_dest)
877 && !TREE_ADDRESSABLE (result))
878 use_it = true;
880 if (use_it)
882 var = modify_dest;
883 use = NULL;
884 goto done;
888 if (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) != INTEGER_CST)
889 abort ();
891 var = copy_decl_for_inlining (result, callee, caller);
892 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
893 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
894 = tree_cons (NULL_TREE, var,
895 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
897 /* Do not have the rest of GCC warn about this variable as it should
898 not be visible to the user. */
899 TREE_NO_WARNING (var) = 1;
901 /* Build the use expr. If the return type of the function was
902 promoted, convert it back to the expected type. */
903 use = var;
904 if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
905 use = fold_convert (caller_type, var);
907 done:
908 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
909 way, when the RESULT_DECL is encountered, it will be
910 automatically replaced by the VAR_DECL. */
911 insert_decl_map (id, result, var);
913 /* Remember this so we can ignore it in remap_decls. */
914 id->retvar = var;
916 *use_p = use;
917 return var;
920 /* Returns nonzero if a function can be inlined as a tree. */
922 bool
923 tree_inlinable_function_p (tree fn)
925 return inlinable_function_p (fn);
928 static const char *inline_forbidden_reason;
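/* Callback for walk_tree. If the subtree rooted at *NODEP contains a
   construct that prevents FNP from being inlined, record a message in
   inline_forbidden_reason and return the offending node; otherwise return
   NULL_TREE. */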
930 static tree
931 inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
932 void *fnp)
934 tree node = *nodep;
935 tree fn = (tree) fnp;
936 tree t;
938 switch (TREE_CODE (node))
940 case CALL_EXPR:
941 /* Refuse to inline an alloca call unless the user explicitly forced it,
942 as this may change the program's memory overhead drastically when the
943 function using alloca is called in a loop. In the GCC code present in
944 SPEC2000, inlining into schedule_block caused it to require 2GB of
945 RAM instead of 256MB. */
946 if (alloca_call_p (node)
947 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
949 inline_forbidden_reason
950 = N_("%Jfunction '%F' can never be inlined because it uses "
951 "alloca (override using the always_inline attribute)");
952 return node;
954 t = get_callee_fndecl (node);
955 if (! t)
956 break;
958 /* We cannot inline functions that call setjmp. */
959 if (setjmp_call_p (t))
961 inline_forbidden_reason
962 = N_("%Jfunction '%F' can never be inlined because it uses setjmp");
963 return node;
966 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
967 switch (DECL_FUNCTION_CODE (t))
969 /* We cannot inline functions that take a variable number of
970 arguments. */
971 case BUILT_IN_VA_START:
972 case BUILT_IN_STDARG_START:
973 case BUILT_IN_NEXT_ARG:
974 case BUILT_IN_VA_END:
975 inline_forbidden_reason
976 = N_("%Jfunction '%F' can never be inlined because it "
977 "uses variable argument lists");
978 return node;
980 case BUILT_IN_LONGJMP:
981 /* We can't inline functions that call __builtin_longjmp at
982 all. The non-local goto machinery really requires the
983 destination be in a different function. If we allow the
984 function calling __builtin_longjmp to be inlined into the
985 function calling __builtin_setjmp, Things will Go Awry. */
986 inline_forbidden_reason
987 = N_("%Jfunction '%F' can never be inlined because "
988 "it uses setjmp-longjmp exception handling");
989 return node;
991 case BUILT_IN_NONLOCAL_GOTO:
992 /* Similarly. */
993 inline_forbidden_reason
994 = N_("%Jfunction '%F' can never be inlined because "
995 "it uses non-local goto");
996 return node;
998 default:
999 break;
1001 break;
1003 case BIND_EXPR:
1004 for (t = BIND_EXPR_VARS (node); t ; t = TREE_CHAIN (t))
1006 /* We cannot inline functions that contain other functions. */
1007 if (TREE_CODE (t) == FUNCTION_DECL && DECL_INITIAL (t))
1009 inline_forbidden_reason
1010 = N_("%Jfunction '%F' can never be inlined "
1011 "because it contains a nested function");
1012 return node;
1015 break;
1017 case GOTO_EXPR:
1018 t = TREE_OPERAND (node, 0);
1020 /* We will not inline a function which uses computed goto. The
1021 addresses of its local labels, which may be tucked into
1022 global storage, are of course not constant across
1023 instantiations, which causes unexpected behavior. */
1024 if (TREE_CODE (t) != LABEL_DECL)
1026 inline_forbidden_reason
1027 = N_("%Jfunction '%F' can never be inlined "
1028 "because it contains a computed goto");
1029 return node;
1031 break;
1033 case LABEL_EXPR:
1034 t = TREE_OPERAND (node, 0);
1035 if (DECL_NONLOCAL (t))
1037 /* We cannot inline a function that receives a non-local goto
1038 because we cannot remap the destination label used in the
1039 function that is performing the non-local goto. */
1040 inline_forbidden_reason
1041 = N_("%Jfunction '%F' can never be inlined "
1042 "because it receives a non-local goto");
1043 return node;
1045 break;
1047 case RECORD_TYPE:
1048 case UNION_TYPE:
1049 /* We cannot inline a function of the form
1051 void F (int i) { struct S { int ar[i]; } s; }
1053 Attempting to do so produces a catch-22.
1054 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1055 UNION_TYPE nodes, then it goes into infinite recursion on a
1056 structure containing a pointer to its own type. If it doesn't,
1057 then the type node for S doesn't get adjusted properly when
1058 F is inlined, and we abort in find_function_data. */
1059 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1060 if (variably_modified_type_p (TREE_TYPE (t), NULL))
1062 inline_forbidden_reason
1063 = N_("%Jfunction '%F' can never be inlined "
1064 "because it uses variable sized variables");
1065 return node;
1068 default:
1069 break;
1072 return NULL_TREE;
1075 /* Return subexpression representing possible alloca call, if any. */
1076 static tree
1077 inline_forbidden_p (tree fndecl)
1079 location_t saved_loc = input_location;
1080 tree ret = walk_tree_without_duplicates (&DECL_SAVED_TREE (fndecl),
1081 inline_forbidden_p_1, fndecl);
1083 input_location = saved_loc;
1084 return ret;
1087 /* Returns nonzero if FN is a function that does not have any
1088 fundamental inline blocking properties. */
1090 static bool
1091 inlinable_function_p (tree fn)
1093 bool inlinable = true;
1095 /* If we've already decided this function shouldn't be inlined,
1096 there's no need to check again. */
1097 if (DECL_UNINLINABLE (fn))
1098 return false;
1100 /* See if there is any language-specific reason it cannot be
1101 inlined. (It is important that this hook be called early because
1102 in C++ it may result in template instantiation.)
1103 If the function is not inlinable for language-specific reasons,
1104 it is left up to the langhook to explain why. */
1105 inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);
1107 /* If we don't have the function body available, we can't inline it.
1108 However, this should not be recorded since we also get here for
1109 forward declared inline functions. Therefore, return at once. */
1110 if (!DECL_SAVED_TREE (fn))
1111 return false;
1113 /* If we're not inlining at all, then we cannot inline this function. */
1114 else if (!flag_inline_trees)
1115 inlinable = false;
1117 /* Only try to inline functions if DECL_INLINE is set. This should be
1118 true for all functions declared `inline', and for all other functions
1119 as well with -finline-functions.
1121 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
1122 it's the front-end that must set DECL_INLINE in this case, because
1123 dwarf2out loses if a function that does not have DECL_INLINE set is
1124 inlined anyway. That is why we have both DECL_INLINE and
1125 DECL_DECLARED_INLINE_P. */
1126 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
1127 here should be redundant. */
1128 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
1129 inlinable = false;
1131 else if (inline_forbidden_p (fn))
1133 /* See if we should warn about uninlinable functions. Previously,
1134 some of these warnings would be issued while trying to expand
1135 the function inline, but that would cause multiple warnings
1136 about functions that would for example call alloca. But since
1137 this is a property of the function, just one warning is enough.
1138 As a bonus we can now give more details about the reason why a
1139 function is not inlinable.
1140 We only warn for functions declared `inline' by the user. */
1141 bool do_warning = (warn_inline
1142 && DECL_INLINE (fn)
1143 && DECL_DECLARED_INLINE_P (fn)
1144 && !DECL_IN_SYSTEM_HEADER (fn));
1146 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1147 sorry (inline_forbidden_reason, fn, fn);
1148 else if (do_warning)
1149 warning (inline_forbidden_reason, fn, fn);
1151 inlinable = false;
1154 /* Squirrel away the result so that we don't have to check again. */
1155 DECL_UNINLINABLE (fn) = !inlinable;
1157 return inlinable;
1160 /* Used by estimate_num_insns. Estimate the number of instructions
1161 needed for the given statement. */
1163 static tree
1164 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
1166 int *count = data;
1167 tree x = *tp;
1169 if (TYPE_P (x) || DECL_P (x))
1171 *walk_subtrees = 0;
1172 return NULL;
1174 /* Assume that constants and references count as nothing. Their cost
1175 should be dominated by the operations using them, which we count
1176 later, and they are a common target of CSE and similar optimizations. */
1177 else if (TREE_CODE_CLASS (TREE_CODE (x)) == 'c'
1178 || TREE_CODE_CLASS (TREE_CODE (x)) == 'r')
1179 return NULL;
1181 switch (TREE_CODE (x))
1183 /* Containers have no cost. */
1184 case TREE_LIST:
1185 case TREE_VEC:
1186 case BLOCK:
1187 case COMPONENT_REF:
1188 case BIT_FIELD_REF:
1189 case INDIRECT_REF:
1190 case ARRAY_REF:
1191 case ARRAY_RANGE_REF:
1192 case OBJ_TYPE_REF:
1193 case EXC_PTR_EXPR: /* ??? */
1194 case FILTER_EXPR: /* ??? */
1195 case COMPOUND_EXPR:
1196 case BIND_EXPR:
1197 case LABELED_BLOCK_EXPR:
1198 case WITH_CLEANUP_EXPR:
1199 case NOP_EXPR:
1200 case VIEW_CONVERT_EXPR:
1201 case SAVE_EXPR:
1202 case ADDR_EXPR:
1203 case COMPLEX_EXPR:
1204 case EXIT_BLOCK_EXPR:
1205 case CASE_LABEL_EXPR:
1206 case SSA_NAME:
1207 case CATCH_EXPR:
1208 case EH_FILTER_EXPR:
1209 case STATEMENT_LIST:
1210 case ERROR_MARK:
1211 case NON_LVALUE_EXPR:
1212 case FDESC_EXPR:
1213 case VA_ARG_EXPR:
1214 case TRY_CATCH_EXPR:
1215 case TRY_FINALLY_EXPR:
1216 case LABEL_EXPR:
1217 case GOTO_EXPR:
1218 case RETURN_EXPR:
1219 case EXIT_EXPR:
1220 case LOOP_EXPR:
1221 case PHI_NODE:
1222 case WITH_SIZE_EXPR:
1223 break;
1225 /* We don't account for constants for now. Assume that their cost is
1226 amortized by the operations that use them. We may reconsider this
1227 decision once we are able to optimize the tree before estimating its
1228 size and break out static initializers. */
1229 case IDENTIFIER_NODE:
1230 case INTEGER_CST:
1231 case REAL_CST:
1232 case COMPLEX_CST:
1233 case VECTOR_CST:
1234 case STRING_CST:
1235 *walk_subtrees = 0;
1236 return NULL;
1238 /* Recognize assignments of large structures and constructors of
1239 big arrays. */
1240 case INIT_EXPR:
1241 case MODIFY_EXPR:
1242 x = TREE_OPERAND (x, 0);
1243 /* FALLTHRU */
1244 case TARGET_EXPR:
1245 case CONSTRUCTOR:
1247 HOST_WIDE_INT size;
1249 size = int_size_in_bytes (TREE_TYPE (x));
1251 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
1252 *count += 10;
1253 else
1254 *count += ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
1256 break;
1258 /* Assign cost of 1 to usual operations.
1259 ??? We may consider mapping RTL costs to this. */
1260 case COND_EXPR:
1262 case PLUS_EXPR:
1263 case MINUS_EXPR:
1264 case MULT_EXPR:
1266 case FIX_TRUNC_EXPR:
1267 case FIX_CEIL_EXPR:
1268 case FIX_FLOOR_EXPR:
1269 case FIX_ROUND_EXPR:
1271 case NEGATE_EXPR:
1272 case FLOAT_EXPR:
1273 case MIN_EXPR:
1274 case MAX_EXPR:
1275 case ABS_EXPR:
1277 case LSHIFT_EXPR:
1278 case RSHIFT_EXPR:
1279 case LROTATE_EXPR:
1280 case RROTATE_EXPR:
1282 case BIT_IOR_EXPR:
1283 case BIT_XOR_EXPR:
1284 case BIT_AND_EXPR:
1285 case BIT_NOT_EXPR:
1287 case TRUTH_ANDIF_EXPR:
1288 case TRUTH_ORIF_EXPR:
1289 case TRUTH_AND_EXPR:
1290 case TRUTH_OR_EXPR:
1291 case TRUTH_XOR_EXPR:
1292 case TRUTH_NOT_EXPR:
1294 case LT_EXPR:
1295 case LE_EXPR:
1296 case GT_EXPR:
1297 case GE_EXPR:
1298 case EQ_EXPR:
1299 case NE_EXPR:
1300 case ORDERED_EXPR:
1301 case UNORDERED_EXPR:
1303 case UNLT_EXPR:
1304 case UNLE_EXPR:
1305 case UNGT_EXPR:
1306 case UNGE_EXPR:
1307 case UNEQ_EXPR:
1308 case LTGT_EXPR:
1310 case CONVERT_EXPR:
1312 case CONJ_EXPR:
1314 case PREDECREMENT_EXPR:
1315 case PREINCREMENT_EXPR:
1316 case POSTDECREMENT_EXPR:
1317 case POSTINCREMENT_EXPR:
1319 case SWITCH_EXPR:
1321 case ASM_EXPR:
1323 case RESX_EXPR:
1324 *count += 1;
1325 break;
1327 /* A few special cases of expensive operations. This is useful for
1328 avoiding inlining of functions that contain too many of them. */
1329 case TRUNC_DIV_EXPR:
1330 case CEIL_DIV_EXPR:
1331 case FLOOR_DIV_EXPR:
1332 case ROUND_DIV_EXPR:
1333 case EXACT_DIV_EXPR:
1334 case TRUNC_MOD_EXPR:
1335 case CEIL_MOD_EXPR:
1336 case FLOOR_MOD_EXPR:
1337 case ROUND_MOD_EXPR:
1338 case RDIV_EXPR:
1339 *count += 10;
1340 break;
1341 case CALL_EXPR:
1343 tree decl = get_callee_fndecl (x);
1345 if (decl && DECL_BUILT_IN (decl))
1346 switch (DECL_FUNCTION_CODE (decl))
1348 case BUILT_IN_CONSTANT_P:
1349 *walk_subtrees = 0;
1350 return NULL_TREE;
1351 case BUILT_IN_EXPECT:
1352 return NULL_TREE;
1353 default:
1354 break;
1356 *count += 10;
1357 break;
1359 default:
1360 /* Abort here so we know we don't miss any nodes. */
1361 abort ();
1363 return NULL;
1366 /* Estimate number of instructions that will be created by expanding EXPR. */
1368 int
1369 estimate_num_insns (tree expr)
1371 int num = 0;
1372 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);
1373 return num;
1376 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
1378 static tree
1379 expand_call_inline (tree *tp, int *walk_subtrees, void *data)
1381 inline_data *id;
1382 tree t;
1383 tree expr;
1384 tree stmt;
1385 tree use_retvar;
1386 tree decl;
1387 tree fn;
1388 tree arg_inits;
1389 tree *inlined_body;
1390 splay_tree st;
1391 tree args;
1392 tree return_slot_addr;
1393 tree modify_dest;
1394 location_t saved_location;
1395 struct cgraph_edge *edge;
1396 const char *reason;
1398 /* See what we've got. */
1399 id = (inline_data *) data;
1400 t = *tp;
1402 /* Set input_location here so we get the right instantiation context
1403 if we call instantiate_decl from inlinable_function_p. */
1404 saved_location = input_location;
1405 if (EXPR_HAS_LOCATION (t))
1406 input_location = EXPR_LOCATION (t);
1408 /* Recurse, but letting recursive invocations know that we are
1409 inside the body of a TARGET_EXPR. */
1410 if (TREE_CODE (*tp) == TARGET_EXPR)
1412 #if 0
1413 int i, len = first_rtl_op (TARGET_EXPR);
1415 /* We're walking our own subtrees. */
1416 *walk_subtrees = 0;
1418 /* Actually walk over them. This loop is the body of
1419 walk_trees, omitting the case where the TARGET_EXPR
1420 itself is handled. */
1421 for (i = 0; i < len; ++i)
1423 if (i == 2)
1424 ++id->in_target_cleanup_p;
1425 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
1426 id->tree_pruner);
1427 if (i == 2)
1428 --id->in_target_cleanup_p;
1431 goto egress;
1432 #endif
1435 if (TYPE_P (t))
1436 /* Because types were not copied in copy_body, CALL_EXPRs beneath
1437 them should not be expanded. This can happen if the type is a
1438 dynamic array type, for example. */
1439 *walk_subtrees = 0;
1441 /* From here on, we're only interested in CALL_EXPRs. */
1442 if (TREE_CODE (t) != CALL_EXPR)
1443 goto egress;
1445 /* First, see if we can figure out what function is being called.
1446 If we cannot, then there is no hope of inlining the function. */
1447 fn = get_callee_fndecl (t);
1448 if (!fn)
1449 goto egress;
1451 /* Turn forward declarations into real ones. */
1452 fn = cgraph_node (fn)->decl;
1454 /* If fn is a declaration of a function in a nested scope that was
1455 globally declared inline, we don't set its DECL_INITIAL.
1456 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
1457 C++ front-end uses it for cdtors to refer to their internal
1458 declarations, which are not real functions. Fortunately those
1459 don't have trees to be saved, so we can tell by checking their
1460 DECL_SAVED_TREE. */
1461 if (! DECL_INITIAL (fn)
1462 && DECL_ABSTRACT_ORIGIN (fn)
1463 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
1464 fn = DECL_ABSTRACT_ORIGIN (fn);
1466 /* Objective C and Fortran still call tree_rest_of_compilation directly.
1467 Kill this check once this is fixed. */
1468 if (!id->current_node->analyzed)
1469 goto egress;
1471 edge = cgraph_edge (id->current_node, t);
1473 /* Constant propagation on arguments done during previous inlining
1474 may create a new direct call. Produce an edge for it. */
1475 if (!edge)
1477 struct cgraph_node *dest = cgraph_node (fn);
1479 /* We have a missing edge in the callgraph. This can happen when
1480 previous inlining turned an indirect call into a direct call by
1481 constant propagating arguments. In all other cases we hit a bug
1482 (incorrect node sharing is the most common reason for missing edges). */
1483 if (!dest->needed)
1484 abort ();
1485 cgraph_create_edge (id->node, dest, t)->inline_failed
1486 = N_("originally indirect function call not considered for inlining");
1487 goto egress;
1490 /* Don't try to inline functions that are not well-suited to
1491 inlining. */
1492 if (!cgraph_inline_p (edge, &reason))
1494 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1496 sorry ("%Jinlining failed in call to '%F': %s", fn, fn, reason);
1497 sorry ("called from here");
1499 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
1500 && !DECL_IN_SYSTEM_HEADER (fn)
1501 && strlen (reason))
1503 warning ("%Jinlining failed in call to '%F': %s", fn, fn, reason);
1504 warning ("called from here");
1506 goto egress;
1509 #ifdef ENABLE_CHECKING
1510 if (edge->callee->decl != id->node->decl)
1511 verify_cgraph_node (edge->callee);
1512 #endif
1514 if (! lang_hooks.tree_inlining.start_inlining (fn))
1515 goto egress;
1517 /* Build a block containing code to initialize the arguments, the
1518 actual inline expansion of the body, and a label for the return
1519 statements within the function to jump to. The type of the
1520 statement expression is the return type of the function call. */
1521 stmt = NULL;
1522 expr = build (BIND_EXPR, void_type_node, NULL_TREE,
1523 stmt, make_node (BLOCK));
1524 BLOCK_ABSTRACT_ORIGIN (BIND_EXPR_BLOCK (expr)) = fn;
1526 /* Local declarations will be replaced by their equivalents in this
1527 map. */
1528 st = id->decl_map;
1529 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
1530 NULL, NULL);
1532 /* Initialize the parameters. */
1533 args = TREE_OPERAND (t, 1);
1534 return_slot_addr = NULL_TREE;
1535 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
1537 return_slot_addr = TREE_VALUE (args);
1538 args = TREE_CHAIN (args);
1539 TREE_TYPE (expr) = void_type_node;
1542 arg_inits = initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2),
1543 fn, expr);
1544 if (arg_inits)
1546 /* Expand any inlined calls in the initializers. Do this before we
1547 push FN on the stack of functions we are inlining; we want to
1548 inline calls to FN that appear in the initializers for the
1549 parameters.
1551 Note we need to save and restore the saved tree statement iterator
1552 to avoid having it clobbered by expand_calls_inline. */
1553 tree_stmt_iterator save_tsi;
1555 save_tsi = id->tsi;
1556 expand_calls_inline (&arg_inits, id);
1557 id->tsi = save_tsi;
1559 /* And add them to the tree. */
1560 append_to_statement_list (arg_inits, &BIND_EXPR_BODY (expr));
1563 /* Record the function we are about to inline so that we can avoid
1564 recursing into it. */
1565 VARRAY_PUSH_TREE (id->fns, fn);
1567 /* Record the function we are about to inline if optimize_function
1568 has not been called on it yet and we don't have it in the list. */
1569 if (! DECL_INLINED_FNS (fn))
1571 int i;
1573 for (i = VARRAY_ACTIVE_SIZE (id->inlined_fns) - 1; i >= 0; i--)
1574 if (VARRAY_TREE (id->inlined_fns, i) == fn)
1575 break;
1576 if (i < 0)
1577 VARRAY_PUSH_TREE (id->inlined_fns, fn);
1580 /* Return statements in the function body will be replaced by jumps
1581 to the RET_LABEL. */
1582 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
1583 DECL_ARTIFICIAL (id->ret_label) = 1;
1584 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
1585 insert_decl_map (id, id->ret_label, id->ret_label);
1587 if (! DECL_INITIAL (fn)
1588 || TREE_CODE (DECL_INITIAL (fn)) != BLOCK)
1589 abort ();
1591 /* Find the lhs to which the result of this call is assigned. */
1592 modify_dest = tsi_stmt (id->tsi);
1593 if (TREE_CODE (modify_dest) == MODIFY_EXPR)
1594 modify_dest = TREE_OPERAND (modify_dest, 0);
1595 else
1596 modify_dest = NULL;
1598 /* Declare the return variable for the function. */
1599 decl = declare_return_variable (id, return_slot_addr,
1600 modify_dest, &use_retvar);
1602 /* After we've initialized the parameters, we insert the body of the
1603 function itself. */
1605 struct cgraph_node *old_node = id->current_node;
1607 id->current_node = edge->callee;
1608 append_to_statement_list (copy_body (id), &BIND_EXPR_BODY (expr));
1609 id->current_node = old_node;
1611 inlined_body = &BIND_EXPR_BODY (expr);
1613 /* After the body of the function comes the RET_LABEL. This must come
1614 before we evaluate the returned value below, because that evaluation
1615 may cause RTL to be generated. */
1616 if (TREE_USED (id->ret_label))
1618 tree label = build1 (LABEL_EXPR, void_type_node, id->ret_label);
1619 append_to_statement_list (label, &BIND_EXPR_BODY (expr));
1622 /* Clean up. */
1623 splay_tree_delete (id->decl_map);
1624 id->decl_map = st;
1626 /* The new expression has side-effects if the old one did. */
1627 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);
1629 tsi_link_before (&id->tsi, expr, TSI_SAME_STMT);
1631 /* If the inlined function returns a result that we care about,
1632 then we're going to need to splice in a MODIFY_EXPR. Otherwise
1633 the call was a standalone statement and we can just replace it
1634 with the BIND_EXPR inline representation of the called function. */
1635 if (!use_retvar || !modify_dest)
1636 *tsi_stmt_ptr (id->tsi) = build_empty_stmt ();
1637 else
1638 *tp = use_retvar;
1640 /* When we gimplify a function call, we may clear TREE_SIDE_EFFECTS on
1641 the call if it is to a "const" function. Thus the copy of
1642 TREE_SIDE_EFFECTS from the CALL_EXPR to the BIND_EXPR above will
1643 result in TREE_SIDE_EFFECTS not being set for the inlined copy of a
1644 "const" function.
1646 Unfortunately, that is wrong as inlining the function can create/expose
1647 interesting side effects (such as setting of a return value).
1649 The easiest solution is to simply recalculate TREE_SIDE_EFFECTS for
1650 the toplevel expression. */
1651 recalculate_side_effects (expr);
1653 /* Update callgraph if needed. */
1654 cgraph_remove_node (edge->callee);
1656 /* Recurse into the body of the just inlined function. */
1657 expand_calls_inline (inlined_body, id);
1658 VARRAY_POP (id->fns);
1660 /* Don't walk into subtrees. We've already handled them above. */
1661 *walk_subtrees = 0;
1663 lang_hooks.tree_inlining.end_inlining (fn);
1665 /* Keep iterating. */
1666 egress:
1667 input_location = saved_location;
1668 return NULL_TREE;
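/* Walk the statement tree rooted at *STMT_P, expanding inline the CALL_EXPRs
   found there. */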
1671 static void
1672 expand_calls_inline (tree *stmt_p, inline_data *id)
1674 tree stmt = *stmt_p;
1675 enum tree_code code = TREE_CODE (stmt);
1676 int dummy;
1678 switch (code)
1680 case STATEMENT_LIST:
1682 tree_stmt_iterator i;
1683 tree new;
1685 for (i = tsi_start (stmt); !tsi_end_p (i); )
1687 id->tsi = i;
1688 expand_calls_inline (tsi_stmt_ptr (i), id);
1690 new = tsi_stmt (i);
1691 if (TREE_CODE (new) == STATEMENT_LIST)
1693 tsi_link_before (&i, new, TSI_SAME_STMT);
1694 tsi_delink (&i);
1696 else
1697 tsi_next (&i);
1700 break;
1702 case COND_EXPR:
1703 expand_calls_inline (&COND_EXPR_THEN (stmt), id);
1704 expand_calls_inline (&COND_EXPR_ELSE (stmt), id);
1705 break;
1707 case CATCH_EXPR:
1708 expand_calls_inline (&CATCH_BODY (stmt), id);
1709 break;
1711 case EH_FILTER_EXPR:
1712 expand_calls_inline (&EH_FILTER_FAILURE (stmt), id);
1713 break;
1715 case TRY_CATCH_EXPR:
1716 case TRY_FINALLY_EXPR:
1717 expand_calls_inline (&TREE_OPERAND (stmt, 0), id);
1718 expand_calls_inline (&TREE_OPERAND (stmt, 1), id);
1719 break;
1721 case BIND_EXPR:
1722 expand_calls_inline (&BIND_EXPR_BODY (stmt), id);
1723 break;
1725 case COMPOUND_EXPR:
1726 /* We're gimple. We should have gotten rid of all these. */
1727 abort ();
1729 case RETURN_EXPR:
1730 stmt_p = &TREE_OPERAND (stmt, 0);
1731 stmt = *stmt_p;
1732 if (!stmt || TREE_CODE (stmt) != MODIFY_EXPR)
1733 break;
1735 /* FALLTHRU */
1737 case MODIFY_EXPR:
1738 stmt_p = &TREE_OPERAND (stmt, 1);
1739 stmt = *stmt_p;
1740 if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
1742 stmt_p = &TREE_OPERAND (stmt, 0);
1743 stmt = *stmt_p;
1745 if (TREE_CODE (stmt) != CALL_EXPR)
1746 break;
1748 /* FALLTHRU */
1750 case CALL_EXPR:
1751 expand_call_inline (stmt_p, &dummy, id);
1752 break;
1754 default:
1755 break;
1759 /* Expand calls to inline functions in the body of FN. */
1761 void
1762 optimize_inline_calls (tree fn)
1764 inline_data id;
1765 tree prev_fn;
1766 tree ifn;
1768 /* There is no point in performing inlining if errors have already
1769 occurred -- and we might crash if we try to inline invalid
1770 code. */
1771 if (errorcount || sorrycount)
1772 return;
1774 /* Clear out ID. */
1775 memset (&id, 0, sizeof (id));
1777 id.current_node = id.node = cgraph_node (fn);
1778 /* Don't allow recursion into FN. */
1779 VARRAY_TREE_INIT (id.fns, 32, "fns");
1780 VARRAY_PUSH_TREE (id.fns, fn);
1781 /* Or any functions that aren't finished yet. */
1782 prev_fn = NULL_TREE;
1783 if (current_function_decl)
1785 VARRAY_PUSH_TREE (id.fns, current_function_decl);
1786 prev_fn = current_function_decl;
1789 prev_fn = lang_hooks.tree_inlining.add_pending_fn_decls (&id.fns, prev_fn);
1791 /* Create the list of functions this call will inline. */
1792 VARRAY_TREE_INIT (id.inlined_fns, 32, "inlined_fns");
1794 /* Keep track of the low-water mark, i.e., the point where the first
1795 real inlining is represented in ID.FNS. */
1796 id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);
1798 /* Replace all calls to inline functions with the bodies of those
1799 functions. */
1800 id.tree_pruner = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
1801 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
1803 /* Clean up. */
1804 htab_delete (id.tree_pruner);
1805 ifn = make_tree_vec (VARRAY_ACTIVE_SIZE (id.inlined_fns));
1806 if (VARRAY_ACTIVE_SIZE (id.inlined_fns))
1807 memcpy (&TREE_VEC_ELT (ifn, 0), &VARRAY_TREE (id.inlined_fns, 0),
1808 VARRAY_ACTIVE_SIZE (id.inlined_fns) * sizeof (tree));
1809 DECL_INLINED_FNS (fn) = ifn;
1811 #ifdef ENABLE_CHECKING
1813 struct cgraph_edge *e;
1815 verify_cgraph_node (id.node);
1817 /* Double check that we inlined everything we are supposed to inline. */
1818 for (e = id.node->callees; e; e = e->next_callee)
1819 if (!e->inline_failed)
1820 abort ();
1822 #endif
1825 /* FN is a function that has a complete body, and CLONE is a function whose
1826 body is to be set to a copy of FN, mapping argument declarations according
1827 to the ARG_MAP splay_tree. */
1829 void
1830 clone_body (tree clone, tree fn, void *arg_map)
1832 inline_data id;
1834 /* Clone the body, as if we were making an inline call. But, remap the
1835 parameters in the callee to the parameters of caller. If there's an
1836 in-charge parameter, map it to an appropriate constant. */
1837 memset (&id, 0, sizeof (id));
1838 VARRAY_TREE_INIT (id.fns, 2, "fns");
1839 VARRAY_PUSH_TREE (id.fns, clone);
1840 VARRAY_PUSH_TREE (id.fns, fn);
1841 id.decl_map = (splay_tree)arg_map;
1843 /* Cloning is treated slightly differently from inlining. Set
1844 CLONING_P so that it's clear which operation we're performing. */
1845 id.cloning_p = true;
1847 /* Actually copy the body. */
1848 append_to_statement_list_force (copy_body (&id), &DECL_SAVED_TREE (clone));
1851 /* Make and return a duplicate of the body of FN. Put copies of DECL_ARGUMENTS
1852 in *arg_copy and of the static chain, if any, in *sc_copy. */
1854 tree
1855 save_body (tree fn, tree *arg_copy, tree *sc_copy)
1857 inline_data id;
1858 tree body, *parg;
1860 memset (&id, 0, sizeof (id));
1861 VARRAY_TREE_INIT (id.fns, 1, "fns");
1862 VARRAY_PUSH_TREE (id.fns, fn);
1863 id.node = cgraph_node (fn);
1864 id.saving_p = true;
1865 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
1866 *arg_copy = DECL_ARGUMENTS (fn);
1868 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
1870 tree new = copy_node (*parg);
1872 lang_hooks.dup_lang_specific_decl (new);
1873 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
1874 insert_decl_map (&id, *parg, new);
1875 TREE_CHAIN (new) = TREE_CHAIN (*parg);
1876 *parg = new;
1879 *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
1880 if (*sc_copy)
1882 tree new = copy_node (*sc_copy);
1884 lang_hooks.dup_lang_specific_decl (new);
1885 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
1886 insert_decl_map (&id, *sc_copy, new);
1887 TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
1888 *sc_copy = new;
1891 insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));
1893 /* Actually copy the body. */
1894 body = copy_body (&id);
1896 /* Clean up. */
1897 splay_tree_delete (id.decl_map);
1898 return body;
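/* Walk the subtree NODE with the current callback and hash table; return
   immediately if the callback produced a result. Used by the type- and
   tree-walking routines below. */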
1901 #define WALK_SUBTREE(NODE) \
1902 do \
1904 result = walk_tree (&(NODE), func, data, htab); \
1905 if (result) \
1906 return result; \
1908 while (0)
1910 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
1911 to be walked whenever a type is seen in the tree. The rest of the operands
1912 and the return value are as for walk_tree. */
1914 static tree
1915 walk_type_fields (tree type, walk_tree_fn func, void *data, void *htab)
1917 tree result = NULL_TREE;
1919 switch (TREE_CODE (type))
1921 case POINTER_TYPE:
1922 case REFERENCE_TYPE:
1923 /* We have to worry about mutually recursive pointers. These can't
1924 be written in C. They can in Ada. It's pathological, but
1925 there's an ACATS test (c38102a) that checks it. Deal with this
1926 by checking if we're pointing to another pointer, that one
1927 points to another pointer, that one does too, and we have no htab.
1928 If so, get a hash table. We check three levels deep to avoid
1929 the cost of the hash table if we don't need one. */
1930 if (POINTER_TYPE_P (TREE_TYPE (type))
1931 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
1932 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
1933 && !htab)
1935 result = walk_tree_without_duplicates (&TREE_TYPE (type),
1936 func, data);
1937 if (result)
1938 return result;
1940 break;
1943 /* ... fall through ... */
1945 case COMPLEX_TYPE:
1946 WALK_SUBTREE (TREE_TYPE (type));
1947 break;
1949 case METHOD_TYPE:
1950 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
1952 /* Fall through. */
1954 case FUNCTION_TYPE:
1955 WALK_SUBTREE (TREE_TYPE (type));
1957 tree arg;
1959 /* We never want to walk into default arguments. */
1960 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
1961 WALK_SUBTREE (TREE_VALUE (arg));
1963 break;
1965 case ARRAY_TYPE:
1966 /* Don't follow this node's type if it is a pointer, for fear that we'll
1967 have infinite recursion. Those types are uninteresting anyway. */
1968 if (!POINTER_TYPE_P (TREE_TYPE (type))
1969 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE)
1970 WALK_SUBTREE (TREE_TYPE (type));
1971 WALK_SUBTREE (TYPE_DOMAIN (type));
1972 break;
1974 case BOOLEAN_TYPE:
1975 case ENUMERAL_TYPE:
1976 case INTEGER_TYPE:
1977 case CHAR_TYPE:
1978 case REAL_TYPE:
1979 WALK_SUBTREE (TYPE_MIN_VALUE (type));
1980 WALK_SUBTREE (TYPE_MAX_VALUE (type));
1981 break;
1983 case OFFSET_TYPE:
1984 WALK_SUBTREE (TREE_TYPE (type));
1985 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
1986 break;
1988 default:
1989 break;
1992 return NULL_TREE;
1995 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
1996 called with the DATA and the address of each sub-tree. If FUNC returns a
1997 non-NULL value, the traversal is aborted, and the value returned by FUNC
1998 is returned. If HTAB is non-NULL it is used to record the nodes visited,
1999 and to avoid visiting a node more than once. */
2001 tree
2002 walk_tree (tree *tp, walk_tree_fn func, void *data, void *htab_)
2004 htab_t htab = (htab_t) htab_;
2005 enum tree_code code;
2006 int walk_subtrees;
2007 tree result;
2009 #define WALK_SUBTREE_TAIL(NODE) \
2010 do \
2012 tp = & (NODE); \
2013 goto tail_recurse; \
2015 while (0)
2017 tail_recurse:
2018 /* Skip empty subtrees. */
2019 if (!*tp)
2020 return NULL_TREE;
2022 if (htab)
2024 void **slot;
2026 /* Don't walk the same tree twice, if the user has requested
2027 that we avoid doing so. */
2028 slot = htab_find_slot (htab, *tp, INSERT);
2029 if (*slot)
2030 return NULL_TREE;
2031 *slot = *tp;
2034 /* Call the function. */
2035 walk_subtrees = 1;
2036 result = (*func) (tp, &walk_subtrees, data);
2038 /* If we found something, return it. */
2039 if (result)
2040 return result;
2042 code = TREE_CODE (*tp);
2044 /* Even if we didn't, FUNC may have decided that there was nothing
2045 interesting below this point in the tree. */
2046 if (!walk_subtrees)
2048 if (code == TREE_LIST)
2049 /* But we still need to check our siblings. */
2050 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
2051 else
2052 return NULL_TREE;
2055 result = lang_hooks.tree_inlining.walk_subtrees (tp, &walk_subtrees, func,
2056 data, htab);
2057 if (result || ! walk_subtrees)
2058 return result;
2060 /* If this is a DECL_EXPR, walk into various fields of the type that it's
2061 defining. We only want to walk into these fields of a type in this
2062 case. Note that decls get walked as part of the processing of a
2063 BIND_EXPR.
2065 ??? Precisely which fields of types we are supposed to walk in
2066 this case vs. the normal case isn't well defined. */
2067 if (code == DECL_EXPR
2068 && TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL
2069 && TREE_CODE (TREE_TYPE (DECL_EXPR_DECL (*tp))) != ERROR_MARK)
2071 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
2073 /* Call the function for the type. See if it returns anything or
2074 doesn't want us to continue. If we are to continue, walk both
2075 the normal fields and those for the declaration case. */
2076 result = (*func) (type_p, &walk_subtrees, data);
2077 if (result || !walk_subtrees)
2078 return NULL_TREE;
2080 result = walk_type_fields (*type_p, func, data, htab_);
2081 if (result)
2082 return result;
2084 WALK_SUBTREE (TYPE_SIZE (*type_p));
2085 WALK_SUBTREE (TYPE_SIZE_UNIT (*type_p));
2087 /* If this is a record type, also walk the fields. */
2088 if (TREE_CODE (*type_p) == RECORD_TYPE
2089 || TREE_CODE (*type_p) == UNION_TYPE
2090 || TREE_CODE (*type_p) == QUAL_UNION_TYPE)
2092 tree field;
2094 for (field = TYPE_FIELDS (*type_p); field;
2095 field = TREE_CHAIN (field))
2097 /* We'd like to look at the type of the field, but we can easily
2098 get infinite recursion. So assume it's pointed to elsewhere
2099 in the tree. Also, ignore things that aren't fields. */
2100 if (TREE_CODE (field) != FIELD_DECL)
2101 continue;
2103 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
2104 WALK_SUBTREE (DECL_SIZE (field));
2105 WALK_SUBTREE (DECL_SIZE_UNIT (field));
2106 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
2107 WALK_SUBTREE (DECL_QUALIFIER (field));
2112 else if (code != EXIT_BLOCK_EXPR
2113 && code != SAVE_EXPR
2114 && code != BIND_EXPR
2115 && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2117 int i, len;
2119 /* Walk over all the sub-trees of this operand. */
2120 len = first_rtl_op (code);
2121 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
2122 But, we only want to walk once. */
2123 if (code == TARGET_EXPR
2124 && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
2125 --len;
2127 /* Go through the subtrees. We need to do this in forward order so
2128 that the scope of a FOR_EXPR is handled properly. */
2129 #ifdef DEBUG_WALK_TREE
2130 for (i = 0; i < len; ++i)
2131 WALK_SUBTREE (TREE_OPERAND (*tp, i));
2132 #else
2133 for (i = 0; i < len - 1; ++i)
2134 WALK_SUBTREE (TREE_OPERAND (*tp, i));
2136 if (len)
2138 /* The common case is that we may tail recurse here. */
2139 if (code != BIND_EXPR
2140 && !TREE_CHAIN (*tp))
2141 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
2142 else
2143 WALK_SUBTREE (TREE_OPERAND (*tp, len - 1));
2145 #endif
2148 /* If this is a type, walk the needed fields in the type. */
2149 else if (TYPE_P (*tp))
2151 result = walk_type_fields (*tp, func, data, htab_);
2152 if (result)
2153 return result;
2155 else
2157 /* Not one of the easy cases. We must explicitly go through the
2158 children. */
2159 switch (code)
2161 case ERROR_MARK:
2162 case IDENTIFIER_NODE:
2163 case INTEGER_CST:
2164 case REAL_CST:
2165 case VECTOR_CST:
2166 case STRING_CST:
2167 case BLOCK:
2168 case PLACEHOLDER_EXPR:
2169 case SSA_NAME:
2170 case FIELD_DECL:
2171 case RESULT_DECL:
2172 /* None of these have subtrees other than those already walked
2173 above. */
2174 break;
2176 case TREE_LIST:
2177 WALK_SUBTREE (TREE_VALUE (*tp));
2178 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
2179 break;
2181 case TREE_VEC:
2183 int len = TREE_VEC_LENGTH (*tp);
2185 if (len == 0)
2186 break;
2188 /* Walk all elements but the first. */
2189 while (--len)
2190 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
2192 /* Now walk the first one as a tail call. */
2193 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
2196 case COMPLEX_CST:
2197 WALK_SUBTREE (TREE_REALPART (*tp));
2198 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
2200 case CONSTRUCTOR:
2201 WALK_SUBTREE_TAIL (CONSTRUCTOR_ELTS (*tp));
2203 case EXIT_BLOCK_EXPR:
2204 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 1));
2206 case SAVE_EXPR:
2207 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
2209 case BIND_EXPR:
2211 tree decl;
2212 for (decl = BIND_EXPR_VARS (*tp); decl; decl = TREE_CHAIN (decl))
2214 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
2215 into declarations that are just mentioned, rather than
2216 declared; they don't really belong to this part of the tree.
2217 And, we can see cycles: the initializer for a declaration
2218 can refer to the declaration itself. */
2219 WALK_SUBTREE (DECL_INITIAL (decl));
2220 WALK_SUBTREE (DECL_SIZE (decl));
2221 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
2223 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
2226 case STATEMENT_LIST:
2228 tree_stmt_iterator i;
2229 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
2230 WALK_SUBTREE (*tsi_stmt_ptr (i));
2232 break;
2234 default:
2235 /* ??? This could be a language-defined node. We really should make
2236 a hook for it, but right now just ignore it. */
2237 break;
2241 /* We didn't find what we were looking for. */
2242 return NULL_TREE;
2244 #undef WALK_SUBTREE
2245 #undef WALK_SUBTREE_TAIL
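/* Usage sketch (illustrative, not part of this file): a walk_tree_fn callback
   is handed the address of each sub-tree, a flag it can clear to prune the
   walk below the current node, and the caller's DATA pointer; returning a
   non-NULL tree stops the walk and becomes walk_tree's return value.  The
   callback and variable names below are hypothetical.

     static tree
     count_calls_r (tree *tp, int *walk_subtrees, void *data)
     {
       if (TYPE_P (*tp))
         *walk_subtrees = 0;
       else if (TREE_CODE (*tp) == CALL_EXPR)
         ++*(int *) data;
       return NULL_TREE;
     }

     int ncalls = 0;
     walk_tree (&DECL_SAVED_TREE (fn), count_calls_r, &ncalls, NULL);  */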
2248 /* Like walk_tree, but does not walk duplicate nodes more than once. */
2250 tree
2251 walk_tree_without_duplicates (tree *tp, walk_tree_fn func, void *data)
2253 tree result;
2254 htab_t htab;
2256 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
2257 result = walk_tree (tp, func, data, htab);
2258 htab_delete (htab);
2259 return result;
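/* Illustrative note (not part of this file): walk_tree_without_duplicates is
   just a convenience wrapper.  A caller that walks several related trees and
   wants nodes shared between them visited only once can manage the hash
   table itself; my_walker_r, expr1, expr2 and data below are hypothetical.

     htab_t visited
       = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
     walk_tree (&expr1, my_walker_r, data, visited);
     walk_tree (&expr2, my_walker_r, data, visited);
     htab_delete (visited);  */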
2262 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2264 tree
2265 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2267 enum tree_code code = TREE_CODE (*tp);
2269 /* We make copies of most nodes. */
2270 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
2271 || TREE_CODE_CLASS (code) == 'c'
2272 || code == TREE_LIST
2273 || code == TREE_VEC
2274 || code == TYPE_DECL)
2276 /* Because the chain gets clobbered when we make a copy, we save it
2277 here. */
2278 tree chain = TREE_CHAIN (*tp);
2279 tree new;
2281 /* Copy the node. */
2282 new = copy_node (*tp);
2284 /* Propagate mudflap marked-ness. */
2285 if (flag_mudflap && mf_marked_p (*tp))
2286 mf_mark (new);
2288 *tp = new;
2290 /* Now, restore the chain, if appropriate. That will cause
2291 walk_tree to walk into the chain as well. */
2292 if (code == PARM_DECL || code == TREE_LIST)
2293 TREE_CHAIN (*tp) = chain;
2295 /* For now, we don't update BLOCKs when we make copies. So, we
2296 have to nullify all BIND_EXPRs. */
2297 if (TREE_CODE (*tp) == BIND_EXPR)
2298 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2301 else if (TREE_CODE_CLASS (code) == 't')
2302 *walk_subtrees = 0;
2303 else if (TREE_CODE_CLASS (code) == 'd')
2304 *walk_subtrees = 0;
2305 else if (code == STATEMENT_LIST)
2306 abort ();
2308 return NULL_TREE;
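/* Illustrative sketch only (not from this file): copy_tree_r is meant to be
   driven by walk_tree to produce a deep copy of an expression in place, e.g.

     tree expr_copy = expr;
     walk_tree (&expr_copy, copy_tree_r, NULL, NULL);

   Note that this by itself does not remap declarations or SAVE_EXPRs; that
   is what unsave_r below layers on top of it.  */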
2311 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2312 information indicating to what new SAVE_EXPR this one should be mapped,
2313 use that one. Otherwise, create a new node and enter it in ST. */
2315 void
2316 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2318 splay_tree st = (splay_tree) st_;
2319 splay_tree_node n;
2320 tree t;
2322 /* See if we already encountered this SAVE_EXPR. */
2323 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2325 /* If we didn't already remap this SAVE_EXPR, do so now. */
2326 if (!n)
2328 t = copy_node (*tp);
2330 /* Remember this SAVE_EXPR. */
2331 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2332 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2333 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2335 else
2337 /* We've already walked into this SAVE_EXPR; don't do it again. */
2338 *walk_subtrees = 0;
2339 t = (tree) n->value;
2342 /* Replace this SAVE_EXPR with the copy. */
2343 *tp = t;
2346 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
2347 copies the label's declaration and enters it in the splay_tree in DATA
2348 (which is really an `inline_data *'). */
2350 static tree
2351 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
2352 void *data)
2354 inline_data *id = (inline_data *) data;
2356 /* Don't walk into types. */
2357 if (TYPE_P (*tp))
2358 *walk_subtrees = 0;
2360 else if (TREE_CODE (*tp) == LABEL_EXPR)
2362 tree decl = TREE_OPERAND (*tp, 0);
2364 /* Copy the decl and remember the copy. */
2365 insert_decl_map (id, decl,
2366 copy_decl_for_inlining (decl, DECL_CONTEXT (decl),
2367 DECL_CONTEXT (decl)));
2370 return NULL_TREE;
2373 /* Called via walk_tree when an expression is unsaved. Using the
2374 decl_map splay_tree in DATA (which is really an `inline_data *'),
2375 remaps all local declarations to appropriate replacements. */
2377 static tree
2378 unsave_r (tree *tp, int *walk_subtrees, void *data)
2380 inline_data *id = (inline_data *) data;
2381 splay_tree st = id->decl_map;
2382 splay_tree_node n;
2384 /* Only a local declaration (variable or label). */
2385 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
2386 || TREE_CODE (*tp) == LABEL_DECL)
2388 /* Lookup the declaration. */
2389 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2391 /* If it's there, remap it. */
2392 if (n)
2393 *tp = (tree) n->value;
2396 else if (TREE_CODE (*tp) == STATEMENT_LIST)
2397 copy_statement_list (tp);
2398 else if (TREE_CODE (*tp) == BIND_EXPR)
2399 copy_bind_expr (tp, walk_subtrees, id);
2400 else if (TREE_CODE (*tp) == SAVE_EXPR)
2401 remap_save_expr (tp, st, walk_subtrees);
2402 else
2404 copy_tree_r (tp, walk_subtrees, NULL);
2406 /* Do whatever unsaving is required. */
2407 unsave_expr_1 (*tp);
2410 /* Keep iterating. */
2411 return NULL_TREE;
2414 /* Default lang hook for "unsave_expr_now". Copies everything in EXPR and
2415 replaces variables, labels and SAVE_EXPRs local to EXPR. */
2417 tree
2418 lhd_unsave_expr_now (tree expr)
2420 inline_data id;
2422 /* There's nothing to do for NULL_TREE. */
2423 if (expr == 0)
2424 return expr;
2426 /* Set up ID. */
2427 memset (&id, 0, sizeof (id));
2428 VARRAY_TREE_INIT (id.fns, 1, "fns");
2429 VARRAY_PUSH_TREE (id.fns, current_function_decl);
2430 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2432 /* Walk the tree once to find local labels. */
2433 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
2435 /* Walk the tree again, copying, remapping, and unsaving. */
2436 walk_tree (&expr, unsave_r, &id, NULL);
2438 /* Clean up. */
2439 splay_tree_delete (id.decl_map);
2441 return expr;
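/* Illustrative note (an assumption, not taken from this file): front ends
   normally reach this default through the corresponding langhook rather
   than calling it directly, along the lines of

     expr = lang_hooks.unsave_expr_now (expr);

   where the exact hook member name is assumed here.  */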
2444 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
2446 static tree
2447 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
2449 if (*tp == data)
2450 return (tree) data;
2451 else
2452 return NULL;
2455 bool
2456 debug_find_tree (tree top, tree search)
2458 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
2461 /* Declare the variables created by the inliner. Add all the variables in
2462 VARS to BIND_EXPR. */
2464 static void
2465 declare_inline_vars (tree bind_expr, tree vars)
2467 tree t;
2468 for (t = vars; t; t = TREE_CHAIN (t))
2469 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
2471 add_var_to_bind_expr (bind_expr, vars);