1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "integrate.h"
36 #include "varray.h"
37 #include "hashtab.h"
38 #include "pointer-set.h"
39 #include "splay-tree.h"
40 #include "langhooks.h"
41 #include "cgraph.h"
42 #include "intl.h"
43 #include "tree-mudflap.h"
44 #include "tree-flow.h"
45 #include "function.h"
46 #include "diagnostic.h"
47 #include "debug.h"
49 /* I'm not real happy about this, but we need to handle gimple and
50 non-gimple trees. */
51 #include "tree-iterator.h"
52 #include "tree-gimple.h"
54 /* 0 if we should not perform inlining.
55 1 if we should expand function calls inline at the tree level.
56 2 if we should consider *all* functions to be inline
57 candidates. */
59 int flag_inline_trees = 0;
61 /* To Do:
63 o In order to make inlining-on-trees work, we pessimized
64 function-local static constants. In particular, they are now
65 always output, even when not addressed. Fix this by treating
66 function-local static constants just like global static
67 constants; the back-end already knows not to output them if they
68 are not needed.
70 o Provide heuristics to clamp inlining of recursive template
71 calls? */
73 /* Data required for function inlining. */
75 typedef struct inline_data
77 /* A stack of the functions we are inlining. For example, if we are
78 compiling `f', which calls `g', which calls `h', and we are
79 inlining the body of `h', the stack will contain, `h', followed
80 by `g', followed by `f'. The first few elements of the stack may
81 contain other functions that we know we should not recurse into,
82 even though they are not directly being inlined. */
83 varray_type fns;
84 /* The index of the first element of FNS that really represents an
85 inlined function. */
86 unsigned first_inlined_fn;
87 /* The label to jump to when a return statement is encountered. If
88 this value is NULL, then return statements will simply be
89 remapped as return statements, rather than as jumps. */
90 tree ret_label;
91 /* The VAR_DECL for the return value. */
92 tree retvar;
93 /* The map from local declarations in the inlined function to
94 equivalents in the function into which it is being inlined. */
95 splay_tree decl_map;
96 /* Nonzero if we are currently within the cleanup for a
97 TARGET_EXPR. */
98 int in_target_cleanup_p;
99 /* We use the same mechanism to build clones that we do to perform
100 inlining. However, there are a few places where we need to
101 distinguish between those two situations. This flag is true if
102 we are cloning, rather than inlining. */
103 bool cloning_p;
104 /* Similarly for saving function body. */
105 bool saving_p;
106 /* Hash table used to prevent walk_tree from visiting the same node
107 umpteen million times. */
108 htab_t tree_pruner;
109 /* Callgraph node of function we are inlining into. */
110 struct cgraph_node *node;
111 /* Callgraph node of currently inlined function. */
112 struct cgraph_node *current_node;
113 /* Statement iterator. We need this so we can keep the tree in
114 gimple form when we insert the inlined function. It is not
115 used when we are not dealing with gimple trees. */
116 tree_stmt_iterator tsi;
117 } inline_data;
119 /* Prototypes. */
121 /* The approximate number of instructions per statement. This number
122 need not be particularly accurate; it is used only to make
123 decisions about when a function is too big to inline. */
124 #define INSNS_PER_STMT (10)
126 static tree copy_body_r (tree *, int *, void *);
127 static tree copy_body (inline_data *);
128 static tree expand_call_inline (tree *, int *, void *);
129 static void expand_calls_inline (tree *, inline_data *);
130 static bool inlinable_function_p (tree);
131 static tree remap_decl (tree, inline_data *);
132 static tree remap_type (tree, inline_data *);
133 static tree initialize_inlined_parameters (inline_data *, tree,
134 tree, tree, tree);
135 static void remap_block (tree *, inline_data *);
136 static tree remap_decls (tree, inline_data *);
137 static void copy_bind_expr (tree *, int *, inline_data *);
138 static tree mark_local_for_remap_r (tree *, int *, void *);
139 static void unsave_expr_1 (tree);
140 static tree unsave_r (tree *, int *, void *);
141 static void declare_inline_vars (tree bind_expr, tree vars);
142 static void remap_save_expr (tree *, void *, int *);
144 /* Insert a tree->tree mapping for ID. Although the name suggests
145 that the trees should be variables, it is used for more than that. */
147 static void
148 insert_decl_map (inline_data *id, tree key, tree value)
150 splay_tree_insert (id->decl_map, (splay_tree_key) key,
151 (splay_tree_value) value);
153 /* Always insert an identity map as well. If we see this same new
154 node again, we won't want to duplicate it a second time. */
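/* Illustrative example: after mapping an old parameter P to its new
   VAR_DECL V, we also record V -> V, so that if copy_body_r later
   encounters V itself (for instance inside an already-remapped size
   expression) it is left alone rather than copied a second time.  */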
155 if (key != value)
156 splay_tree_insert (id->decl_map, (splay_tree_key) value,
157 (splay_tree_value) value);
160 /* Remap DECL during the copying of the BLOCK tree for the function.
161 We are only called to remap local variables in the current function. */
163 static tree
164 remap_decl (tree decl, inline_data *id)
166 splay_tree_node n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
167 tree fn = VARRAY_TOP_TREE (id->fns);
169 /* See if we have remapped this declaration. If we didn't already have an
170 equivalent for this declaration, create one now. */
171 if (!n)
173 /* Make a copy of the variable or label. */
174 tree t = copy_decl_for_inlining (decl, fn, VARRAY_TREE (id->fns, 0));
176 /* Remap types, if necessary. */
177 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
178 if (TREE_CODE (t) == TYPE_DECL)
179 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
180 else if (TREE_CODE (t) == PARM_DECL)
181 DECL_ARG_TYPE_AS_WRITTEN (t)
182 = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);
184 /* Remap sizes as necessary. */
185 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
186 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
188 /* If fields, do likewise for offset and qualifier. */
189 if (TREE_CODE (t) == FIELD_DECL)
191 walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
192 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
193 walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
196 #if 0
197 /* FIXME handle anon aggrs. */
198 if (! DECL_NAME (t) && TREE_TYPE (t)
199 && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
201 /* For a VAR_DECL of anonymous type, we must also copy the
202 member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS. */
203 tree members = NULL;
204 tree src;
206 for (src = DECL_ANON_UNION_ELEMS (t); src;
207 src = TREE_CHAIN (src))
209 tree member = remap_decl (TREE_VALUE (src), id);
211 gcc_assert (!TREE_PURPOSE (src));
212 members = tree_cons (NULL, member, members);
214 DECL_ANON_UNION_ELEMS (t) = nreverse (members);
216 #endif
218 /* Remember it, so that if we encounter this local entity
219 again we can reuse this copy. */
220 insert_decl_map (id, decl, t);
221 return t;
224 return unshare_expr ((tree) n->value);
227 static tree
228 remap_type (tree type, inline_data *id)
230 splay_tree_node node;
231 tree new, t;
233 if (type == NULL)
234 return type;
236 /* See if we have remapped this type. */
237 node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
238 if (node)
239 return (tree) node->value;
241 /* The type only needs remapping if it's variably modified by a variable
242 in the function we are inlining. */
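/* Illustrative example: if the callee declares a VLA such as "char buf[n]",
   where N is one of its parameters, the array type's size expression mentions
   N and must be rebuilt in terms of the remapped copy of N; an ordinary type
   such as "int" is simply entered into the map as an identity mapping and
   returned unchanged.  */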
243 if (! variably_modified_type_p (type, VARRAY_TOP_TREE (id->fns)))
245 insert_decl_map (id, type, type);
246 return type;
249 /* We do need a copy. Build and register it now. If this is a pointer or
250 reference type, remap the designated type and make a new pointer or
251 reference type. */
252 if (TREE_CODE (type) == POINTER_TYPE)
254 new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
255 TYPE_MODE (type),
256 TYPE_REF_CAN_ALIAS_ALL (type));
257 insert_decl_map (id, type, new);
258 return new;
260 else if (TREE_CODE (type) == REFERENCE_TYPE)
262 new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
263 TYPE_MODE (type),
264 TYPE_REF_CAN_ALIAS_ALL (type));
265 insert_decl_map (id, type, new);
266 return new;
268 else
269 new = copy_node (type);
271 insert_decl_map (id, type, new);
273 /* This is a new type, not a copy of an old type. Need to reassociate
274 variants. We can handle everything except the main variant lazily. */
275 t = TYPE_MAIN_VARIANT (type);
276 if (type != t)
278 t = remap_type (t, id);
279 TYPE_MAIN_VARIANT (new) = t;
280 TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
281 TYPE_NEXT_VARIANT (t) = new;
283 else
285 TYPE_MAIN_VARIANT (new) = new;
286 TYPE_NEXT_VARIANT (new) = NULL;
289 /* Lazily create pointer and reference types. */
290 TYPE_POINTER_TO (new) = NULL;
291 TYPE_REFERENCE_TO (new) = NULL;
293 switch (TREE_CODE (new))
295 case INTEGER_TYPE:
296 case REAL_TYPE:
297 case ENUMERAL_TYPE:
298 case BOOLEAN_TYPE:
299 case CHAR_TYPE:
300 t = TYPE_MIN_VALUE (new);
301 if (t && TREE_CODE (t) != INTEGER_CST)
302 walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
304 t = TYPE_MAX_VALUE (new);
305 if (t && TREE_CODE (t) != INTEGER_CST)
306 walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
307 return new;
309 case FUNCTION_TYPE:
310 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
311 walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
312 return new;
314 case ARRAY_TYPE:
315 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
316 TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
317 break;
319 case RECORD_TYPE:
320 case UNION_TYPE:
321 case QUAL_UNION_TYPE:
322 walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
323 break;
325 case FILE_TYPE:
326 case OFFSET_TYPE:
327 default:
328 /* Shouldn't have been thought variable sized. */
329 gcc_unreachable ();
332 walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
333 walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
335 return new;
338 static tree
339 remap_decls (tree decls, inline_data *id)
341 tree old_var;
342 tree new_decls = NULL_TREE;
344 /* Remap its variables. */
345 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
347 tree new_var;
349 /* Remap the variable. */
350 new_var = remap_decl (old_var, id);
352 /* If we didn't remap this variable, we can't mess with its
353 TREE_CHAIN. If we remapped this variable to the return slot, it's
354 already declared somewhere else, so don't declare it here. */
355 if (!new_var || new_var == id->retvar)
357 else
359 gcc_assert (DECL_P (new_var));
360 TREE_CHAIN (new_var) = new_decls;
361 new_decls = new_var;
365 return nreverse (new_decls);
368 /* Copy the BLOCK to contain remapped versions of the variables
369 therein. And hook the new block into the block-tree. */
371 static void
372 remap_block (tree *block, inline_data *id)
374 tree old_block;
375 tree new_block;
376 tree fn;
378 /* Make the new block. */
379 old_block = *block;
380 new_block = make_node (BLOCK);
381 TREE_USED (new_block) = TREE_USED (old_block);
382 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
383 *block = new_block;
385 /* Remap its variables. */
386 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
388 fn = VARRAY_TREE (id->fns, 0);
389 #if 1
390 /* FIXME! It shouldn't be so hard to manage blocks. Rebuilding them in
391 rest_of_compilation is a good start. */
392 if (id->cloning_p)
393 /* We're building a clone; DECL_INITIAL is still
394 error_mark_node, and current_binding_level is the parm
395 binding level. */
396 lang_hooks.decls.insert_block (new_block);
397 else
399 /* Attach this new block after the DECL_INITIAL block for the
400 function into which this block is being inlined. In
401 rest_of_compilation we will straighten out the BLOCK tree. */
402 tree *first_block;
403 if (DECL_INITIAL (fn))
404 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
405 else
406 first_block = &DECL_INITIAL (fn);
407 BLOCK_CHAIN (new_block) = *first_block;
408 *first_block = new_block;
410 #endif
411 /* Remember the remapped block. */
412 insert_decl_map (id, old_block, new_block);
415 static void
416 copy_statement_list (tree *tp)
418 tree_stmt_iterator oi, ni;
419 tree new;
421 new = alloc_stmt_list ();
422 ni = tsi_start (new);
423 oi = tsi_start (*tp);
424 *tp = new;
426 for (; !tsi_end_p (oi); tsi_next (&oi))
427 tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
430 static void
431 copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
433 tree block = BIND_EXPR_BLOCK (*tp);
434 /* Copy (and replace) the statement. */
435 copy_tree_r (tp, walk_subtrees, NULL);
436 if (block)
438 remap_block (&block, id);
439 BIND_EXPR_BLOCK (*tp) = block;
442 if (BIND_EXPR_VARS (*tp))
443 /* This will remap a lot of the same decls again, but this should be
444 harmless. */
445 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
448 /* Called from copy_body via walk_tree. DATA is really an `inline_data *'. */
450 static tree
451 copy_body_r (tree *tp, int *walk_subtrees, void *data)
453 inline_data *id = (inline_data *) data;
454 tree fn = VARRAY_TOP_TREE (id->fns);
456 #if 0
457 /* All automatic variables should have a DECL_CONTEXT indicating
458 what function they come from. */
459 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
460 && DECL_NAMESPACE_SCOPE_P (*tp))
461 gcc_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp));
462 #endif
464 /* If this is a RETURN_EXPR, change it into a MODIFY_EXPR and a
465 GOTO_EXPR with the RET_LABEL as its target. */
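/* Illustrative example: an inlined "return a + b;" (whose operand is the
   gimple assignment "<result> = a + b") is rewritten as a BIND_EXPR holding
   that assignment followed by "goto <ret_label>"; the RESULT_DECL on the
   left-hand side is later replaced through the decl map set up by
   declare_return_variable.  */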
466 if (TREE_CODE (*tp) == RETURN_EXPR && id->ret_label)
468 tree return_stmt = *tp;
469 tree goto_stmt;
471 /* Build the GOTO_EXPR. */
472 tree assignment = TREE_OPERAND (return_stmt, 0);
473 goto_stmt = build1 (GOTO_EXPR, void_type_node, id->ret_label);
474 TREE_USED (id->ret_label) = 1;
476 /* If we're returning something, just turn that into an
477 assignment into the equivalent of the original
478 RESULT_DECL. */
479 if (assignment)
481 /* Do not create a statement containing a naked RESULT_DECL. */
482 if (TREE_CODE (assignment) == RESULT_DECL)
483 gimplify_stmt (&assignment);
485 *tp = build (BIND_EXPR, void_type_node, NULL, NULL, NULL);
486 append_to_statement_list (assignment, &BIND_EXPR_BODY (*tp));
487 append_to_statement_list (goto_stmt, &BIND_EXPR_BODY (*tp));
489 /* If we're not returning anything just do the jump. */
490 else
491 *tp = goto_stmt;
493 /* Local variables and labels need to be replaced by equivalent
494 variables. We don't want to copy static variables; there's only
495 one of those, no matter how many times we inline the containing
496 function. Similarly for globals from an outer function. */
497 else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
499 tree new_decl;
501 /* Remap the declaration. */
502 new_decl = remap_decl (*tp, id);
503 gcc_assert (new_decl);
504 /* Replace this variable with the copy. */
505 STRIP_TYPE_NOPS (new_decl);
506 *tp = new_decl;
508 else if (TREE_CODE (*tp) == STATEMENT_LIST)
509 copy_statement_list (tp);
510 else if (TREE_CODE (*tp) == SAVE_EXPR)
511 remap_save_expr (tp, id->decl_map, walk_subtrees);
512 else if (TREE_CODE (*tp) == BIND_EXPR)
513 copy_bind_expr (tp, walk_subtrees, id);
514 /* Types may need remapping as well. */
515 else if (TYPE_P (*tp))
516 *tp = remap_type (*tp, id);
518 /* If this is a constant, we have to copy the node iff the type will be
519 remapped. copy_tree_r will not copy a constant. */
520 else if (TREE_CODE_CLASS (TREE_CODE (*tp)) == tcc_constant)
522 tree new_type = remap_type (TREE_TYPE (*tp), id);
524 if (new_type == TREE_TYPE (*tp))
525 *walk_subtrees = 0;
527 else if (TREE_CODE (*tp) == INTEGER_CST)
528 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
529 TREE_INT_CST_HIGH (*tp));
530 else
532 *tp = copy_node (*tp);
533 TREE_TYPE (*tp) = new_type;
537 /* Otherwise, just copy the node. Note that copy_tree_r already
538 knows not to copy VAR_DECLs, etc., so this is safe. */
539 else
541 tree old_node = *tp;
543 if (TREE_CODE (*tp) == MODIFY_EXPR
544 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
545 && (lang_hooks.tree_inlining.auto_var_in_fn_p
546 (TREE_OPERAND (*tp, 0), fn)))
548 /* Some assignments VAR = VAR; don't generate any rtl code
549 and thus don't count as variable modification. Avoid
550 keeping bogosities like 0 = 0. */
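/* Illustrative example: given a degenerate assignment "p = p;" where the
   parameter P was mapped to the constant 0 by setup_one_parameter, copying
   would otherwise leave "0 = 0"; such a statement is replaced below by an
   empty statement instead.  */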
551 tree decl = TREE_OPERAND (*tp, 0), value;
552 splay_tree_node n;
554 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
555 if (n)
557 value = (tree) n->value;
558 STRIP_TYPE_NOPS (value);
559 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
561 *tp = build_empty_stmt ();
562 return copy_body_r (tp, walk_subtrees, data);
566 else if (TREE_CODE (*tp) == INDIRECT_REF)
568 /* Get rid of *& from inline substitutions that can happen when a
569 pointer argument is an ADDR_EXPR. */
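/* Illustrative example: if the caller passed "&x" for a pointer parameter P,
   and setup_one_parameter mapped P directly to that ADDR_EXPR, then a use of
   "*p" in the inlined body is simplified here to plain "x" rather than
   "*&x".  */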
570 tree decl = TREE_OPERAND (*tp, 0), value;
571 splay_tree_node n;
573 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
574 if (n)
576 value = (tree) n->value;
577 STRIP_NOPS (value);
578 if (TREE_CODE (value) == ADDR_EXPR
579 && (lang_hooks.types_compatible_p
580 (TREE_TYPE (*tp), TREE_TYPE (TREE_OPERAND (value, 0)))))
582 *tp = TREE_OPERAND (value, 0);
583 return copy_body_r (tp, walk_subtrees, data);
588 copy_tree_r (tp, walk_subtrees, NULL);
590 if (TREE_CODE (*tp) == CALL_EXPR && id->node && get_callee_fndecl (*tp))
592 if (id->saving_p)
594 struct cgraph_node *node;
595 struct cgraph_edge *edge;
597 for (node = id->node->next_clone; node; node = node->next_clone)
599 edge = cgraph_edge (node, old_node);
600 gcc_assert (edge);
601 edge->call_expr = *tp;
604 else
606 struct cgraph_edge *edge
607 = cgraph_edge (id->current_node, old_node);
609 if (edge)
610 cgraph_clone_edge (edge, id->node, *tp);
614 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
616 /* The copied TARGET_EXPR has never been expanded, even if the
617 original node was expanded already. */
618 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
620 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
621 TREE_OPERAND (*tp, 3) = NULL_TREE;
624 /* Variable substitution need not be simple; consider, in particular,
625 the INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
626 and friends are up-to-date. */
627 else if (TREE_CODE (*tp) == ADDR_EXPR)
629 walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
630 recompute_tree_invarant_for_addr_expr (*tp);
631 *walk_subtrees = 0;
635 /* Keep iterating. */
636 return NULL_TREE;
639 /* Make a copy of the body of FN so that it can be inserted inline in
640 another function. */
642 static tree
643 copy_body (inline_data *id)
645 tree body;
646 tree fndecl = VARRAY_TOP_TREE (id->fns);
648 if (fndecl == current_function_decl
649 && cfun->saved_tree)
650 body = cfun->saved_tree;
651 else
652 body = DECL_SAVED_TREE (fndecl);
653 walk_tree (&body, copy_body_r, id, NULL);
655 return body;
658 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
659 defined in function FN, or of a data member thereof. */
661 static bool
662 self_inlining_addr_expr (tree value, tree fn)
664 tree var;
666 if (TREE_CODE (value) != ADDR_EXPR)
667 return false;
669 var = get_base_address (TREE_OPERAND (value, 0));
671 return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
674 static void
675 setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
676 tree *init_stmts, tree *vars, bool *gimplify_init_stmts_p)
678 tree init_stmt;
679 tree var;
681 /* If the parameter is never assigned to, we may not need to
682 create a new variable here at all. Instead, we may be able
683 to just use the argument value. */
684 if (TREE_READONLY (p)
685 && !TREE_ADDRESSABLE (p)
686 && value && !TREE_SIDE_EFFECTS (value))
688 /* We can't risk substituting complex expressions. They
689 might contain variables that will be assigned to later.
690 Theoretically, we could check the expression to see if
691 all of the variables that determine its value are
692 read-only, but we don't bother. */
693 /* We may produce non-gimple trees by adding NOPs or introduce
694 invalid sharing when the operand is not really constant.
695 It is not a big deal to prohibit constant propagation here, as
696 we will constant propagate in the DOM1 pass anyway. */
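/* Illustrative example: for a call such as "f (3)" where the parameter P of
   F is TREE_READONLY and never has its address taken, the constant 3 is
   simply recorded as the replacement for P in the decl map, so no temporary
   variable or initializing assignment is generated at all.  */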
697 if (is_gimple_min_invariant (value)
698 && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
699 /* We have to be very careful about ADDR_EXPR. Make sure
700 the base variable isn't a local variable of the inlined
701 function, e.g., when doing recursive inlining, direct or
702 mutually-recursive or whatever, which is why we don't
703 just test whether fn == current_function_decl. */
704 && ! self_inlining_addr_expr (value, fn))
706 insert_decl_map (id, p, value);
707 return;
711 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
712 here since the type of this decl must be visible to the calling
713 function. */
714 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
716 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
717 that way, when the PARM_DECL is encountered, it will be
718 automatically replaced by the VAR_DECL. */
719 insert_decl_map (id, p, var);
721 /* Declare this new variable. */
722 TREE_CHAIN (var) = *vars;
723 *vars = var;
725 /* Make gimplifier happy about this variable. */
726 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
728 /* Even if P was TREE_READONLY, the new VAR should not be.
729 In the original code, we would have constructed a
730 temporary, and then the function body would have never
731 changed the value of P. However, now, we will be
732 constructing VAR directly. The constructor body may
733 change its value multiple times as it is being
734 constructed. Therefore, it must not be TREE_READONLY;
735 the back-end assumes that a TREE_READONLY variable is
736 assigned to only once. */
737 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
738 TREE_READONLY (var) = 0;
740 /* Initialize this VAR_DECL from the equivalent argument. Convert
741 the argument to the proper type in case it was promoted. */
742 if (value)
744 tree rhs = fold_convert (TREE_TYPE (var), value);
746 if (rhs == error_mark_node)
747 return;
749 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
750 keep our trees in gimple form. */
751 init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
752 append_to_statement_list (init_stmt, init_stmts);
754 /* If we did not create a gimple value and we did not create a gimple
755 cast of a gimple value, then we will need to gimplify INIT_STMTS
756 at the end. Note that is_gimple_cast only checks the outer
757 tree code, not its operand. Thus the explicit check that its
758 operand is a gimple value. */
759 if (!is_gimple_val (rhs)
760 && (!is_gimple_cast (rhs)
761 || !is_gimple_val (TREE_OPERAND (rhs, 0))))
762 *gimplify_init_stmts_p = true;
766 /* Generate code to initialize the parameters of the function at the
767 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
769 static tree
770 initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
771 tree fn, tree bind_expr)
773 tree init_stmts = NULL_TREE;
774 tree parms;
775 tree a;
776 tree p;
777 tree vars = NULL_TREE;
778 bool gimplify_init_stmts_p = false;
779 int argnum = 0;
781 /* Figure out what the parameters are. */
782 parms = DECL_ARGUMENTS (fn);
783 if (fn == current_function_decl)
784 parms = cfun->saved_args;
786 /* Loop through the parameter declarations, replacing each with an
787 equivalent VAR_DECL, appropriately initialized. */
788 for (p = parms, a = args; p;
789 a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
791 tree value;
793 ++argnum;
795 /* Find the initializer. */
796 value = lang_hooks.tree_inlining.convert_parm_for_inlining
797 (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);
799 setup_one_parameter (id, p, value, fn, &init_stmts, &vars,
800 &gimplify_init_stmts_p);
803 /* Evaluate trailing arguments. */
804 for (; a; a = TREE_CHAIN (a))
806 tree value = TREE_VALUE (a);
807 append_to_statement_list (value, &init_stmts);
810 /* Initialize the static chain. */
811 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
812 if (p)
814 /* No static chain? Seems like a bug in tree-nested.c. */
815 gcc_assert (static_chain);
817 setup_one_parameter (id, p, static_chain, fn, &init_stmts, &vars,
818 &gimplify_init_stmts_p);
821 if (gimplify_init_stmts_p)
822 gimplify_body (&init_stmts, current_function_decl, false);
824 declare_inline_vars (bind_expr, vars);
825 return init_stmts;
828 /* Declare a return variable to replace the RESULT_DECL for the function we
829 are calling. RETURN_SLOT_ADDR, if non-null, was a fake parameter that
830 took the address of the result. MODIFY_DEST, if non-null, was the LHS of
831 the MODIFY_EXPR to which this call is the RHS.
833 The return value is a (possibly null) value that is the result of the
834 function as seen by the callee. *USE_P is a (possibly null) value that
835 holds the result as seen by the caller. */
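/* Illustrative example: for a call compiled as "y = f (x)", MODIFY_DEST is
   "y"; roughly, when the caller and callee types match and "y" is neither
   static nor addressable, "y" is reused directly as the callee's return
   variable, and otherwise a fresh compatible temporary is created with
   *USE_P set to the value the caller should read.  */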
837 static tree
838 declare_return_variable (inline_data *id, tree return_slot_addr,
839 tree modify_dest, tree *use_p)
841 tree callee = VARRAY_TOP_TREE (id->fns);
842 tree caller = VARRAY_TREE (id->fns, 0);
843 tree result = DECL_RESULT (callee);
844 tree callee_type = TREE_TYPE (result);
845 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
846 tree var, use;
848 /* We don't need to do anything for functions that don't return
849 anything. */
850 if (!result || VOID_TYPE_P (callee_type))
852 *use_p = NULL_TREE;
853 return NULL_TREE;
856 /* If there was a return slot, then the return value is the
857 dereferenced address of that object. */
858 if (return_slot_addr)
860 /* The front end shouldn't have used both return_slot_addr and
861 a modify expression. */
862 gcc_assert (!modify_dest);
863 if (DECL_BY_REFERENCE (result))
864 var = return_slot_addr;
865 else
866 var = build_fold_indirect_ref (return_slot_addr);
867 use = NULL;
868 goto done;
871 /* All types requiring non-trivial constructors should have been handled. */
872 gcc_assert (!TREE_ADDRESSABLE (callee_type));
874 /* Attempt to avoid creating a new temporary variable. */
875 if (modify_dest)
877 bool use_it = false;
879 /* We can't use MODIFY_DEST if there's type promotion involved. */
880 if (!lang_hooks.types_compatible_p (caller_type, callee_type))
881 use_it = false;
883 /* ??? If we're assigning to a variable sized type, then we must
884 reuse the destination variable, because we've no good way to
885 create variable sized temporaries at this point. */
886 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
887 use_it = true;
889 /* If the callee cannot possibly modify MODIFY_DEST, then we can
890 reuse it as the result of the call directly. Don't do this if
891 it would promote MODIFY_DEST to addressable. */
892 else if (!TREE_STATIC (modify_dest)
893 && !TREE_ADDRESSABLE (modify_dest)
894 && !TREE_ADDRESSABLE (result))
895 use_it = true;
897 if (use_it)
899 var = modify_dest;
900 use = NULL;
901 goto done;
905 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
907 var = copy_decl_for_inlining (result, callee, caller);
908 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
909 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
910 = tree_cons (NULL_TREE, var,
911 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
913 /* Do not have the rest of GCC warn about this variable as it should
914 not be visible to the user. */
915 TREE_NO_WARNING (var) = 1;
917 /* Build the use expr. If the return type of the function was
918 promoted, convert it back to the expected type. */
919 use = var;
920 if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
921 use = fold_convert (caller_type, var);
923 done:
924 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
925 way, when the RESULT_DECL is encountered, it will be
926 automatically replaced by the VAR_DECL. */
927 insert_decl_map (id, result, var);
929 /* Remember this so we can ignore it in remap_decls. */
930 id->retvar = var;
932 *use_p = use;
933 return var;
936 /* Returns nonzero if a function can be inlined as a tree. */
938 bool
939 tree_inlinable_function_p (tree fn)
941 return inlinable_function_p (fn);
944 static const char *inline_forbidden_reason;
946 static tree
947 inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
948 void *fnp)
950 tree node = *nodep;
951 tree fn = (tree) fnp;
952 tree t;
954 switch (TREE_CODE (node))
956 case CALL_EXPR:
957 /* Refuse to inline an alloca call unless the user explicitly forced it,
958 as this may change the program's memory overhead drastically when the
959 function using alloca is called in a loop. In the GCC included in
960 SPEC2000, inlining into schedule_block caused it to require 2GB of
961 RAM instead of 256MB. */
962 if (alloca_call_p (node)
963 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
965 inline_forbidden_reason
966 = N_("%Jfunction %qF can never be inlined because it uses "
967 "alloca (override using the always_inline attribute)");
968 return node;
970 t = get_callee_fndecl (node);
971 if (! t)
972 break;
974 /* We cannot inline functions that call setjmp. */
975 if (setjmp_call_p (t))
977 inline_forbidden_reason
978 = N_("%Jfunction %qF can never be inlined because it uses setjmp");
979 return node;
982 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
983 switch (DECL_FUNCTION_CODE (t))
985 /* We cannot inline functions that take a variable number of
986 arguments. */
987 case BUILT_IN_VA_START:
988 case BUILT_IN_STDARG_START:
989 case BUILT_IN_NEXT_ARG:
990 case BUILT_IN_VA_END:
991 inline_forbidden_reason
992 = N_("%Jfunction %qF can never be inlined because it "
993 "uses variable argument lists");
994 return node;
996 case BUILT_IN_LONGJMP:
997 /* We can't inline functions that call __builtin_longjmp at
998 all. The non-local goto machinery really requires the
999 destination be in a different function. If we allow the
1000 function calling __builtin_longjmp to be inlined into the
1001 function calling __builtin_setjmp, Things will Go Awry. */
1002 inline_forbidden_reason
1003 = N_("%Jfunction %qF can never be inlined because "
1004 "it uses setjmp-longjmp exception handling");
1005 return node;
1007 case BUILT_IN_NONLOCAL_GOTO:
1008 /* Similarly. */
1009 inline_forbidden_reason
1010 = N_("%Jfunction %qF can never be inlined because "
1011 "it uses non-local goto");
1012 return node;
1014 default:
1015 break;
1017 break;
1019 case GOTO_EXPR:
1020 t = TREE_OPERAND (node, 0);
1022 /* We will not inline a function which uses computed goto. The
1023 addresses of its local labels, which may be tucked into
1024 global storage, are of course not constant across
1025 instantiations, which causes unexpected behavior. */
1026 if (TREE_CODE (t) != LABEL_DECL)
1028 inline_forbidden_reason
1029 = N_("%Jfunction %qF can never be inlined "
1030 "because it contains a computed goto");
1031 return node;
1033 break;
1035 case LABEL_EXPR:
1036 t = TREE_OPERAND (node, 0);
1037 if (DECL_NONLOCAL (t))
1039 /* We cannot inline a function that receives a non-local goto
1040 because we cannot remap the destination label used in the
1041 function that is performing the non-local goto. */
1042 inline_forbidden_reason
1043 = N_("%Jfunction %qF can never be inlined "
1044 "because it receives a non-local goto");
1045 return node;
1047 break;
1049 case RECORD_TYPE:
1050 case UNION_TYPE:
1051 /* We cannot inline a function of the form
1053 void F (int i) { struct S { int ar[i]; } s; }
1055 Attempting to do so produces a catch-22.
1056 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1057 UNION_TYPE nodes, then it goes into infinite recursion on a
1058 structure containing a pointer to its own type. If it doesn't,
1059 then the type node for S doesn't get adjusted properly when
1060 F is inlined, and we abort in find_function_data.
1062 ??? This is likely no longer true, but it's too late in the 4.0
1063 cycle to try to find out. This should be checked for 4.1. */
1064 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1065 if (variably_modified_type_p (TREE_TYPE (t), NULL))
1067 inline_forbidden_reason
1068 = N_("%Jfunction %qF can never be inlined "
1069 "because it uses variable sized variables");
1070 return node;
1073 default:
1074 break;
1077 return NULL_TREE;
1080 /* Return a subexpression that forbids inlining of FNDECL, if any. */
1081 static tree
1082 inline_forbidden_p (tree fndecl)
1084 location_t saved_loc = input_location;
1085 tree ret = walk_tree_without_duplicates (&DECL_SAVED_TREE (fndecl),
1086 inline_forbidden_p_1, fndecl);
1088 input_location = saved_loc;
1089 return ret;
1092 /* Returns nonzero if FN is a function that does not have any
1093 fundamental inline blocking properties. */
1095 static bool
1096 inlinable_function_p (tree fn)
1098 bool inlinable = true;
1100 /* If we've already decided this function shouldn't be inlined,
1101 there's no need to check again. */
1102 if (DECL_UNINLINABLE (fn))
1103 return false;
1105 /* See if there is any language-specific reason it cannot be
1106 inlined. (It is important that this hook be called early because
1107 in C++ it may result in template instantiation.)
1108 If the function is not inlinable for language-specific reasons,
1109 it is left up to the langhook to explain why. */
1110 inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);
1112 /* If we don't have the function body available, we can't inline it.
1113 However, this should not be recorded since we also get here for
1114 forward declared inline functions. Therefore, return at once. */
1115 if (!DECL_SAVED_TREE (fn))
1116 return false;
1118 /* If we're not inlining at all, then we cannot inline this function. */
1119 else if (!flag_inline_trees)
1120 inlinable = false;
1122 /* Only try to inline functions if DECL_INLINE is set. This should be
1123 true for all functions declared `inline', and for all other functions
1124 as well with -finline-functions.
1126 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
1127 it's the front-end that must set DECL_INLINE in this case, because
1128 dwarf2out loses if a function that does not have DECL_INLINE set is
1129 inlined anyway. That is why we have both DECL_INLINE and
1130 DECL_DECLARED_INLINE_P. */
1131 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
1132 here should be redundant. */
1133 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
1134 inlinable = false;
1136 else if (inline_forbidden_p (fn))
1138 /* See if we should warn about uninlinable functions. Previously,
1139 some of these warnings would be issued while trying to expand
1140 the function inline, but that would cause multiple warnings
1141 about functions that would for example call alloca. But since
1142 this is a property of the function, just one warning is enough.
1143 As a bonus we can now give more details about the reason why a
1144 function is not inlinable.
1145 We only warn for functions declared `inline' by the user. */
1146 bool do_warning = (warn_inline
1147 && DECL_INLINE (fn)
1148 && DECL_DECLARED_INLINE_P (fn)
1149 && !DECL_IN_SYSTEM_HEADER (fn));
1151 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1152 sorry (inline_forbidden_reason, fn, fn);
1153 else if (do_warning)
1154 warning (inline_forbidden_reason, fn, fn);
1156 inlinable = false;
1159 /* Squirrel away the result so that we don't have to check again. */
1160 DECL_UNINLINABLE (fn) = !inlinable;
1162 return inlinable;
1165 /* Used by estimate_num_insns. Estimate the number of instructions seen
1166 in a given statement. */
1168 static tree
1169 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
1171 int *count = data;
1172 tree x = *tp;
1174 if (IS_TYPE_OR_DECL_P (x))
1176 *walk_subtrees = 0;
1177 return NULL;
1179 /* Assume that constants and references count as nothing. These should
1180 be dominated by the operations among them, which we count later,
1181 and are a common target of CSE and similar optimizations. */
1182 else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
1183 return NULL;
1185 switch (TREE_CODE (x))
1187 /* Containers have no cost. */
1188 case TREE_LIST:
1189 case TREE_VEC:
1190 case BLOCK:
1191 case COMPONENT_REF:
1192 case BIT_FIELD_REF:
1193 case INDIRECT_REF:
1194 case ALIGN_INDIRECT_REF:
1195 case MISALIGNED_INDIRECT_REF:
1196 case ARRAY_REF:
1197 case ARRAY_RANGE_REF:
1198 case OBJ_TYPE_REF:
1199 case EXC_PTR_EXPR: /* ??? */
1200 case FILTER_EXPR: /* ??? */
1201 case COMPOUND_EXPR:
1202 case BIND_EXPR:
1203 case WITH_CLEANUP_EXPR:
1204 case NOP_EXPR:
1205 case VIEW_CONVERT_EXPR:
1206 case SAVE_EXPR:
1207 case ADDR_EXPR:
1208 case COMPLEX_EXPR:
1209 case RANGE_EXPR:
1210 case CASE_LABEL_EXPR:
1211 case SSA_NAME:
1212 case CATCH_EXPR:
1213 case EH_FILTER_EXPR:
1214 case STATEMENT_LIST:
1215 case ERROR_MARK:
1216 case NON_LVALUE_EXPR:
1217 case FDESC_EXPR:
1218 case VA_ARG_EXPR:
1219 case TRY_CATCH_EXPR:
1220 case TRY_FINALLY_EXPR:
1221 case LABEL_EXPR:
1222 case GOTO_EXPR:
1223 case RETURN_EXPR:
1224 case EXIT_EXPR:
1225 case LOOP_EXPR:
1226 case PHI_NODE:
1227 case WITH_SIZE_EXPR:
1228 break;
1230 /* We don't account constants for now. Assume that the cost is amortized
1231 by operations that do use them. We may re-consider this decision once
1232 we are able to optimize the tree before estimating its size and break
1233 out static initializers. */
1234 case IDENTIFIER_NODE:
1235 case INTEGER_CST:
1236 case REAL_CST:
1237 case COMPLEX_CST:
1238 case VECTOR_CST:
1239 case STRING_CST:
1240 *walk_subtrees = 0;
1241 return NULL;
1243 /* Recognize assignments of large structures and constructors of
1244 big arrays. */
1245 case INIT_EXPR:
1246 case MODIFY_EXPR:
1247 x = TREE_OPERAND (x, 0);
1248 /* FALLTHRU */
1249 case TARGET_EXPR:
1250 case CONSTRUCTOR:
1252 HOST_WIDE_INT size;
1254 size = int_size_in_bytes (TREE_TYPE (x));
1256 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
1257 *count += 10;
1258 else
1259 *count += ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
1261 break;
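/* Illustrative example, assuming hypothetical target values of
   MOVE_MAX_PIECES == 8 and MOVE_RATIO == 3: copying a 16-byte structure
   costs (16 + 7) / 8 = 2 units, while a copy larger than 8 * 3 = 24 bytes,
   or of unknown size, is charged a flat 10.  */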
1263 /* Assign cost of 1 to usual operations.
1264 ??? We may consider mapping RTL costs to this. */
1265 case COND_EXPR:
1267 case PLUS_EXPR:
1268 case MINUS_EXPR:
1269 case MULT_EXPR:
1271 case FIX_TRUNC_EXPR:
1272 case FIX_CEIL_EXPR:
1273 case FIX_FLOOR_EXPR:
1274 case FIX_ROUND_EXPR:
1276 case NEGATE_EXPR:
1277 case FLOAT_EXPR:
1278 case MIN_EXPR:
1279 case MAX_EXPR:
1280 case ABS_EXPR:
1282 case LSHIFT_EXPR:
1283 case RSHIFT_EXPR:
1284 case LROTATE_EXPR:
1285 case RROTATE_EXPR:
1287 case BIT_IOR_EXPR:
1288 case BIT_XOR_EXPR:
1289 case BIT_AND_EXPR:
1290 case BIT_NOT_EXPR:
1292 case TRUTH_ANDIF_EXPR:
1293 case TRUTH_ORIF_EXPR:
1294 case TRUTH_AND_EXPR:
1295 case TRUTH_OR_EXPR:
1296 case TRUTH_XOR_EXPR:
1297 case TRUTH_NOT_EXPR:
1299 case LT_EXPR:
1300 case LE_EXPR:
1301 case GT_EXPR:
1302 case GE_EXPR:
1303 case EQ_EXPR:
1304 case NE_EXPR:
1305 case ORDERED_EXPR:
1306 case UNORDERED_EXPR:
1308 case UNLT_EXPR:
1309 case UNLE_EXPR:
1310 case UNGT_EXPR:
1311 case UNGE_EXPR:
1312 case UNEQ_EXPR:
1313 case LTGT_EXPR:
1315 case CONVERT_EXPR:
1317 case CONJ_EXPR:
1319 case PREDECREMENT_EXPR:
1320 case PREINCREMENT_EXPR:
1321 case POSTDECREMENT_EXPR:
1322 case POSTINCREMENT_EXPR:
1324 case SWITCH_EXPR:
1326 case ASM_EXPR:
1328 case REALIGN_LOAD_EXPR:
1330 case RESX_EXPR:
1331 *count += 1;
1332 break;
1334 /* A few special cases of expensive operations. This is useful
1335 to avoid inlining functions that have too many of these. */
1336 case TRUNC_DIV_EXPR:
1337 case CEIL_DIV_EXPR:
1338 case FLOOR_DIV_EXPR:
1339 case ROUND_DIV_EXPR:
1340 case EXACT_DIV_EXPR:
1341 case TRUNC_MOD_EXPR:
1342 case CEIL_MOD_EXPR:
1343 case FLOOR_MOD_EXPR:
1344 case ROUND_MOD_EXPR:
1345 case RDIV_EXPR:
1346 *count += 10;
1347 break;
1348 case CALL_EXPR:
1350 tree decl = get_callee_fndecl (x);
1352 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
1353 switch (DECL_FUNCTION_CODE (decl))
1355 case BUILT_IN_CONSTANT_P:
1356 *walk_subtrees = 0;
1357 return NULL_TREE;
1358 case BUILT_IN_EXPECT:
1359 return NULL_TREE;
1360 default:
1361 break;
1363 *count += 10;
1364 break;
1366 default:
1367 /* Abort here so we know we don't miss any nodes. */
1368 gcc_unreachable ();
1370 return NULL;
1373 /* Estimate number of instructions that will be created by expanding EXPR. */
1375 int
1376 estimate_num_insns (tree expr)
1378 int num = 0;
1379 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);
1380 return num;
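/* Usage sketch (an assumption about the callers, not taken from this file):
   the call-graph inlining heuristics apply this to an entire function body,
   e.g. estimate_num_insns (DECL_SAVED_TREE (fndecl)), to obtain a rough size
   that is then weighed against the inline-size limits.  */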
1383 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
1385 static tree
1386 expand_call_inline (tree *tp, int *walk_subtrees, void *data)
1388 inline_data *id;
1389 tree t;
1390 tree expr;
1391 tree stmt;
1392 tree use_retvar;
1393 tree decl;
1394 tree fn;
1395 tree arg_inits;
1396 tree *inlined_body;
1397 splay_tree st;
1398 tree args;
1399 tree return_slot_addr;
1400 tree modify_dest;
1401 location_t saved_location;
1402 struct cgraph_edge *edge;
1403 const char *reason;
1405 /* See what we've got. */
1406 id = (inline_data *) data;
1407 t = *tp;
1409 /* Set input_location here so we get the right instantiation context
1410 if we call instantiate_decl from inlinable_function_p. */
1411 saved_location = input_location;
1412 if (EXPR_HAS_LOCATION (t))
1413 input_location = EXPR_LOCATION (t);
1415 /* Recurse, but letting recursive invocations know that we are
1416 inside the body of a TARGET_EXPR. */
1417 if (TREE_CODE (*tp) == TARGET_EXPR)
1419 #if 0
1420 int i, len = TREE_CODE_LENGTH (TARGET_EXPR);
1422 /* We're walking our own subtrees. */
1423 *walk_subtrees = 0;
1425 /* Actually walk over them. This loop is the body of
1426 walk_trees, omitting the case where the TARGET_EXPR
1427 itself is handled. */
1428 for (i = 0; i < len; ++i)
1430 if (i == 2)
1431 ++id->in_target_cleanup_p;
1432 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
1433 id->tree_pruner);
1434 if (i == 2)
1435 --id->in_target_cleanup_p;
1438 goto egress;
1439 #endif
1442 if (TYPE_P (t))
1443 /* Because types were not copied in copy_body, CALL_EXPRs beneath
1444 them should not be expanded. This can happen if the type is a
1445 dynamic array type, for example. */
1446 *walk_subtrees = 0;
1448 /* From here on, we're only interested in CALL_EXPRs. */
1449 if (TREE_CODE (t) != CALL_EXPR)
1450 goto egress;
1452 /* First, see if we can figure out what function is being called.
1453 If we cannot, then there is no hope of inlining the function. */
1454 fn = get_callee_fndecl (t);
1455 if (!fn)
1456 goto egress;
1458 /* Turn forward declarations into real ones. */
1459 fn = cgraph_node (fn)->decl;
1461 /* If fn is a declaration of a function in a nested scope that was
1462 globally declared inline, we don't set its DECL_INITIAL.
1463 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
1464 C++ front-end uses it for cdtors to refer to their internal
1465 declarations, which are not real functions. Fortunately those
1466 don't have trees to be saved, so we can tell by checking their
1467 DECL_SAVED_TREE. */
1468 if (! DECL_INITIAL (fn)
1469 && DECL_ABSTRACT_ORIGIN (fn)
1470 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
1471 fn = DECL_ABSTRACT_ORIGIN (fn);
1473 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
1474 Kill this check once this is fixed. */
1475 if (!id->current_node->analyzed)
1476 goto egress;
1478 edge = cgraph_edge (id->current_node, t);
1480 /* Constant propagation on argument done during previous inlining
1481 may create new direct call. Produce an edge for it. */
1482 if (!edge)
1484 struct cgraph_node *dest = cgraph_node (fn);
1486 /* We have a missing edge in the callgraph. This can happen in one case,
1487 where previous inlining turned an indirect call into a direct call by
1488 constant propagating arguments. In all other cases we hit a bug
1489 (incorrect node sharing is the most common reason for missing edges). */
1490 gcc_assert (dest->needed || !flag_unit_at_a_time);
1491 cgraph_create_edge (id->node, dest, t)->inline_failed
1492 = N_("originally indirect function call not considered for inlining");
1493 goto egress;
1496 /* Don't try to inline functions that are not well-suited to
1497 inlining. */
1498 if (!cgraph_inline_p (edge, &reason))
1500 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1502 sorry ("%Jinlining failed in call to %qF: %s", fn, fn, reason);
1503 sorry ("called from here");
1505 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
1506 && !DECL_IN_SYSTEM_HEADER (fn)
1507 && strlen (reason)
1508 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn)))
1510 warning ("%Jinlining failed in call to %qF: %s", fn, fn, reason);
1511 warning ("called from here");
1513 goto egress;
1516 #ifdef ENABLE_CHECKING
1517 if (edge->callee->decl != id->node->decl)
1518 verify_cgraph_node (edge->callee);
1519 #endif
1521 if (! lang_hooks.tree_inlining.start_inlining (fn))
1522 goto egress;
1524 /* Build a block containing code to initialize the arguments, the
1525 actual inline expansion of the body, and a label for the return
1526 statements within the function to jump to. The type of the
1527 statement expression is the return type of the function call. */
1528 stmt = NULL;
1529 expr = build (BIND_EXPR, void_type_node, NULL_TREE,
1530 stmt, make_node (BLOCK));
1531 BLOCK_ABSTRACT_ORIGIN (BIND_EXPR_BLOCK (expr)) = fn;
1533 /* Local declarations will be replaced by their equivalents in this
1534 map. */
1535 st = id->decl_map;
1536 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
1537 NULL, NULL);
1539 /* Initialize the parameters. */
1540 args = TREE_OPERAND (t, 1);
1541 return_slot_addr = NULL_TREE;
1542 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
1544 return_slot_addr = TREE_VALUE (args);
1545 args = TREE_CHAIN (args);
1546 TREE_TYPE (expr) = void_type_node;
1549 arg_inits = initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2),
1550 fn, expr);
1551 if (arg_inits)
1553 /* Expand any inlined calls in the initializers. Do this before we
1554 push FN on the stack of functions we are inlining; we want to
1555 inline calls to FN that appear in the initializers for the
1556 parameters.
1558 Note we need to save and restore the saved tree statement iterator
1559 to avoid having it clobbered by expand_calls_inline. */
1560 tree_stmt_iterator save_tsi;
1562 save_tsi = id->tsi;
1563 expand_calls_inline (&arg_inits, id);
1564 id->tsi = save_tsi;
1566 /* And add them to the tree. */
1567 append_to_statement_list (arg_inits, &BIND_EXPR_BODY (expr));
1570 /* Record the function we are about to inline so that we can avoid
1571 recursing into it. */
1572 VARRAY_PUSH_TREE (id->fns, fn);
1574 /* Return statements in the function body will be replaced by jumps
1575 to the RET_LABEL. */
1576 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
1577 DECL_ARTIFICIAL (id->ret_label) = 1;
1578 DECL_IGNORED_P (id->ret_label) = 1;
1579 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
1580 insert_decl_map (id, id->ret_label, id->ret_label);
1582 gcc_assert (DECL_INITIAL (fn));
1583 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
1585 /* Find the lhs to which the result of this call is assigned. */
1586 modify_dest = tsi_stmt (id->tsi);
1587 if (TREE_CODE (modify_dest) == MODIFY_EXPR)
1589 modify_dest = TREE_OPERAND (modify_dest, 0);
1591 /* The function which we are inlining might not return a value,
1592 in which case we should issue a warning that the function
1593 does not return a value. In that case the optimizers will
1594 see that the variable to which the value is assigned was not
1595 initialized. We do not want to issue a warning about that
1596 uninitialized variable. */
1597 if (DECL_P (modify_dest))
1598 TREE_NO_WARNING (modify_dest) = 1;
1600 else
1601 modify_dest = NULL;
1603 /* Declare the return variable for the function. */
1604 decl = declare_return_variable (id, return_slot_addr,
1605 modify_dest, &use_retvar);
1607 /* After we've initialized the parameters, we insert the body of the
1608 function itself. */
1610 struct cgraph_node *old_node = id->current_node;
1611 tree copy;
1613 id->current_node = edge->callee;
1614 copy = copy_body (id);
1616 if (warn_return_type
1617 && !TREE_NO_WARNING (fn)
1618 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
1619 && block_may_fallthru (copy))
1621 warning ("control may reach end of non-void function %qD being inlined",
1622 fn);
1623 TREE_NO_WARNING (fn) = 1;
1626 append_to_statement_list (copy, &BIND_EXPR_BODY (expr));
1627 id->current_node = old_node;
1629 inlined_body = &BIND_EXPR_BODY (expr);
1631 /* After the body of the function comes the RET_LABEL. This must come
1632 before we evaluate the returned value below, because that evaluation
1633 may cause RTL to be generated. */
1634 if (TREE_USED (id->ret_label))
1636 tree label = build1 (LABEL_EXPR, void_type_node, id->ret_label);
1637 append_to_statement_list (label, &BIND_EXPR_BODY (expr));
1640 /* Clean up. */
1641 splay_tree_delete (id->decl_map);
1642 id->decl_map = st;
1644 /* Although, from the semantic viewpoint, the new expression has
1645 side-effects only if the old one did, it is not possible, from
1646 the technical viewpoint, to evaluate the body of a function
1647 multiple times without serious havoc. */
1648 TREE_SIDE_EFFECTS (expr) = 1;
1650 tsi_link_before (&id->tsi, expr, TSI_SAME_STMT);
1652 /* If the inlined function returns a result that we care about,
1653 then we're going to need to splice in a MODIFY_EXPR. Otherwise
1654 the call was a standalone statement and we can just replace it
1655 with the BIND_EXPR inline representation of the called function. */
1656 if (!use_retvar || !modify_dest)
1657 *tsi_stmt_ptr (id->tsi) = build_empty_stmt ();
1658 else
1659 *tp = use_retvar;
1661 /* When we gimplify a function call, we may clear TREE_SIDE_EFFECTS on
1662 the call if it is to a "const" function. Thus the copy of
1663 TREE_SIDE_EFFECTS from the CALL_EXPR to the BIND_EXPR above would
1664 result in TREE_SIDE_EFFECTS not being set for the inlined copy of a
1665 "const" function.
1667 Unfortunately, that is wrong as inlining the function can create/expose
1668 interesting side effects (such as setting of a return value).
1670 The easiest solution is to simply recalculate TREE_SIDE_EFFECTS for
1671 the toplevel expression. */
1672 recalculate_side_effects (expr);
1674 /* Output the inlining info for this abstract function, since it has been
1675 inlined. If we don't do this now, we can lose the information about the
1676 variables in the function when the blocks get blown away as soon as we
1677 remove the cgraph node. */
1678 (*debug_hooks->outlining_inline_function) (edge->callee->decl);
1680 /* Update callgraph if needed. */
1681 cgraph_remove_node (edge->callee);
1683 /* Recurse into the body of the just inlined function. */
1684 expand_calls_inline (inlined_body, id);
1685 VARRAY_POP (id->fns);
1687 /* Don't walk into subtrees. We've already handled them above. */
1688 *walk_subtrees = 0;
1690 lang_hooks.tree_inlining.end_inlining (fn);
1692 /* Keep iterating. */
1693 egress:
1694 input_location = saved_location;
1695 return NULL_TREE;
1698 static void
1699 expand_calls_inline (tree *stmt_p, inline_data *id)
1701 tree stmt = *stmt_p;
1702 enum tree_code code = TREE_CODE (stmt);
1703 int dummy;
1705 switch (code)
1707 case STATEMENT_LIST:
1709 tree_stmt_iterator i;
1710 tree new;
1712 for (i = tsi_start (stmt); !tsi_end_p (i); )
1714 id->tsi = i;
1715 expand_calls_inline (tsi_stmt_ptr (i), id);
1717 new = tsi_stmt (i);
1718 if (TREE_CODE (new) == STATEMENT_LIST)
1720 tsi_link_before (&i, new, TSI_SAME_STMT);
1721 tsi_delink (&i);
1723 else
1724 tsi_next (&i);
1727 break;
1729 case COND_EXPR:
1730 expand_calls_inline (&COND_EXPR_THEN (stmt), id);
1731 expand_calls_inline (&COND_EXPR_ELSE (stmt), id);
1732 break;
1734 case CATCH_EXPR:
1735 expand_calls_inline (&CATCH_BODY (stmt), id);
1736 break;
1738 case EH_FILTER_EXPR:
1739 expand_calls_inline (&EH_FILTER_FAILURE (stmt), id);
1740 break;
1742 case TRY_CATCH_EXPR:
1743 case TRY_FINALLY_EXPR:
1744 expand_calls_inline (&TREE_OPERAND (stmt, 0), id);
1745 expand_calls_inline (&TREE_OPERAND (stmt, 1), id);
1746 break;
1748 case BIND_EXPR:
1749 expand_calls_inline (&BIND_EXPR_BODY (stmt), id);
1750 break;
1752 case COMPOUND_EXPR:
1753 /* We're gimple. We should have gotten rid of all these. */
1754 gcc_unreachable ();
1756 case RETURN_EXPR:
1757 stmt_p = &TREE_OPERAND (stmt, 0);
1758 stmt = *stmt_p;
1759 if (!stmt || TREE_CODE (stmt) != MODIFY_EXPR)
1760 break;
1762 /* FALLTHRU */
1764 case MODIFY_EXPR:
1765 stmt_p = &TREE_OPERAND (stmt, 1);
1766 stmt = *stmt_p;
1767 if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
1769 stmt_p = &TREE_OPERAND (stmt, 0);
1770 stmt = *stmt_p;
1772 if (TREE_CODE (stmt) != CALL_EXPR)
1773 break;
1775 /* FALLTHRU */
1777 case CALL_EXPR:
1778 expand_call_inline (stmt_p, &dummy, id);
1779 break;
1781 default:
1782 break;
1786 /* Expand calls to inline functions in the body of FN. */
1788 void
1789 optimize_inline_calls (tree fn)
1791 inline_data id;
1792 tree prev_fn;
1794 /* There is no point in performing inlining if errors have already
1795 occurred -- and we might crash if we try to inline invalid
1796 code. */
1797 if (errorcount || sorrycount)
1798 return;
1800 /* Clear out ID. */
1801 memset (&id, 0, sizeof (id));
1803 id.current_node = id.node = cgraph_node (fn);
1804 /* Don't allow recursion into FN. */
1805 VARRAY_TREE_INIT (id.fns, 32, "fns");
1806 VARRAY_PUSH_TREE (id.fns, fn);
1807 /* Or any functions that aren't finished yet. */
1808 prev_fn = NULL_TREE;
1809 if (current_function_decl)
1811 VARRAY_PUSH_TREE (id.fns, current_function_decl);
1812 prev_fn = current_function_decl;
1815 prev_fn = lang_hooks.tree_inlining.add_pending_fn_decls (&id.fns, prev_fn);
1817 /* Keep track of the low-water mark, i.e., the point where the first
1818 real inlining is represented in ID.FNS. */
1819 id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);
1821 /* Replace all calls to inline functions with the bodies of those
1822 functions. */
1823 id.tree_pruner = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
1824 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
1826 /* Clean up. */
1827 htab_delete (id.tree_pruner);
1829 #ifdef ENABLE_CHECKING
1831 struct cgraph_edge *e;
1833 verify_cgraph_node (id.node);
1835 /* Double check that we inlined everything we are supposed to inline. */
1836 for (e = id.node->callees; e; e = e->next_callee)
1837 gcc_assert (e->inline_failed);
1839 #endif
1842 /* FN is a function that has a complete body, and CLONE is a function whose
1843 body is to be set to a copy of FN, mapping argument declarations according
1844 to the ARG_MAP splay_tree. */
1846 void
1847 clone_body (tree clone, tree fn, void *arg_map)
1849 inline_data id;
1851 /* Clone the body, as if we were making an inline call. But, remap the
1852 parameters in the callee to the parameters of caller. If there's an
1853 in-charge parameter, map it to an appropriate constant. */
1854 memset (&id, 0, sizeof (id));
1855 VARRAY_TREE_INIT (id.fns, 2, "fns");
1856 VARRAY_PUSH_TREE (id.fns, clone);
1857 VARRAY_PUSH_TREE (id.fns, fn);
1858 id.decl_map = (splay_tree)arg_map;
1860 /* Cloning is treated slightly differently from inlining. Set
1861 CLONING_P so that it's clear which operation we're performing. */
1862 id.cloning_p = true;
1864 /* Actually copy the body. */
1865 append_to_statement_list_force (copy_body (&id), &DECL_SAVED_TREE (clone));
1868 /* Make and return duplicate of body in FN. Put copies of DECL_ARGUMENTS
1869 in *arg_copy and of the static chain, if any, in *sc_copy. */
1871 tree
1872 save_body (tree fn, tree *arg_copy, tree *sc_copy)
1874 inline_data id;
1875 tree body, *parg;
1877 memset (&id, 0, sizeof (id));
1878 VARRAY_TREE_INIT (id.fns, 1, "fns");
1879 VARRAY_PUSH_TREE (id.fns, fn);
1880 id.node = cgraph_node (fn);
1881 id.saving_p = true;
1882 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
1883 *arg_copy = DECL_ARGUMENTS (fn);
1885 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
1887 tree new = copy_node (*parg);
1889 lang_hooks.dup_lang_specific_decl (new);
1890 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
1891 insert_decl_map (&id, *parg, new);
1892 TREE_CHAIN (new) = TREE_CHAIN (*parg);
1893 *parg = new;
1896 *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
1897 if (*sc_copy)
1899 tree new = copy_node (*sc_copy);
1901 lang_hooks.dup_lang_specific_decl (new);
1902 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
1903 insert_decl_map (&id, *sc_copy, new);
1904 TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
1905 *sc_copy = new;
1908 insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));
1910 /* Actually copy the body. */
1911 body = copy_body (&id);
1913 /* Clean up. */
1914 splay_tree_delete (id.decl_map);
1915 return body;
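/* Usage sketch (illustrative only; this helper is hypothetical): take a
   private copy of FN's body before later passes modify it.  The copied
   argument decls and static chain decl come back through the locals and
   would be kept alongside the returned body by a real caller.  */

static void
save_body_example (tree fn)
{
  tree saved_args, saved_chain, saved_body;

  saved_body = save_body (fn, &saved_args, &saved_chain);

  /* SAVED_BODY is a copy of FN's body; SAVED_ARGS chains copies of FN's
     parameter decls, and SAVED_CHAIN is the copied static chain decl or
     NULL_TREE.  */
  (void) saved_body;
}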
1918 #define WALK_SUBTREE(NODE) \
1919 do \
1921 result = walk_tree (&(NODE), func, data, pset); \
1922 if (result) \
1923 return result; \
1925 while (0)
1927 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
1928 to be walked whenever a type is seen in the tree. The rest of the operands
1929 and the return value are as for walk_tree. */
1931 static tree
1932 walk_type_fields (tree type, walk_tree_fn func, void *data,
1933 struct pointer_set_t *pset)
1935 tree result = NULL_TREE;
1937 switch (TREE_CODE (type))
1939 case POINTER_TYPE:
1940 case REFERENCE_TYPE:
1941 /* We have to worry about mutually recursive pointers. These can't
1942 be written in C. They can in Ada. It's pathological, but
1943 there's an ACATS test (c38102a) that checks it. Deal with this
1944 by checking whether we're pointing to another pointer, that one
1945 points to another pointer, that one does too, and we have no pset.
1946 If so, walk with a pointer set to catch the cycle. We check three
1947 levels deep to avoid the cost of the pointer set if we don't need one. */
1948 if (POINTER_TYPE_P (TREE_TYPE (type))
1949 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
1950 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
1951 && !pset)
1953 result = walk_tree_without_duplicates (&TREE_TYPE (type),
1954 func, data);
1955 if (result)
1956 return result;
1958 break;
1961 /* ... fall through ... */
1963 case COMPLEX_TYPE:
1964 WALK_SUBTREE (TREE_TYPE (type));
1965 break;
1967 case METHOD_TYPE:
1968 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
1970 /* Fall through. */
1972 case FUNCTION_TYPE:
1973 WALK_SUBTREE (TREE_TYPE (type));
1975 tree arg;
1977 /* We never want to walk into default arguments. */
1978 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
1979 WALK_SUBTREE (TREE_VALUE (arg));
1981 break;
1983 case ARRAY_TYPE:
1984 /* Don't follow this node's type if it is a pointer, for fear that we'll
1985 have infinite recursion. Those types are uninteresting anyway. */
1986 if (!POINTER_TYPE_P (TREE_TYPE (type))
1987 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE)
1988 WALK_SUBTREE (TREE_TYPE (type));
1989 WALK_SUBTREE (TYPE_DOMAIN (type));
1990 break;
1992 case BOOLEAN_TYPE:
1993 case ENUMERAL_TYPE:
1994 case INTEGER_TYPE:
1995 case CHAR_TYPE:
1996 case REAL_TYPE:
1997 WALK_SUBTREE (TYPE_MIN_VALUE (type));
1998 WALK_SUBTREE (TYPE_MAX_VALUE (type));
1999 break;
2001 case OFFSET_TYPE:
2002 WALK_SUBTREE (TREE_TYPE (type));
2003 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
2004 break;
2006 default:
2007 break;
2010 return NULL_TREE;
2013 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
2014 called with the DATA and the address of each sub-tree. If FUNC returns a
2015 non-NULL value, the traversal is aborted, and the value returned by FUNC
2016 is returned. If PSET is non-NULL it is used to record the nodes visited,
2017 and to avoid visiting a node more than once. */
2019 tree
2020 walk_tree (tree *tp, walk_tree_fn func, void *data, struct pointer_set_t *pset)
2022 enum tree_code code;
2023 int walk_subtrees;
2024 tree result;
2026 #define WALK_SUBTREE_TAIL(NODE) \
2027 do \
2029 tp = & (NODE); \
2030 goto tail_recurse; \
2032 while (0)
2034 tail_recurse:
2035 /* Skip empty subtrees. */
2036 if (!*tp)
2037 return NULL_TREE;
2039 /* Don't walk the same tree twice, if the user has requested
2040 that we avoid doing so. */
2041 if (pset && pointer_set_insert (pset, *tp))
2042 return NULL_TREE;
2044 /* Call the function. */
2045 walk_subtrees = 1;
2046 result = (*func) (tp, &walk_subtrees, data);
2048 /* If we found something, return it. */
2049 if (result)
2050 return result;
2052 code = TREE_CODE (*tp);
2054 /* Even if we didn't, FUNC may have decided that there was nothing
2055 interesting below this point in the tree. */
2056 if (!walk_subtrees)
2058 if (code == TREE_LIST)
2059 /* But we still need to check our siblings. */
2060 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
2061 else
2062 return NULL_TREE;
2065 result = lang_hooks.tree_inlining.walk_subtrees (tp, &walk_subtrees, func,
2066 data, pset);
2067 if (result || ! walk_subtrees)
2068 return result;
2070 /* If this is a DECL_EXPR, walk into various fields of the type that it's
2071 defining. We only want to walk into these fields of a type in this
2072 case. Note that decls get walked as part of the processing of a
2073 BIND_EXPR.
2075 ??? Precisely which fields of types we are supposed to walk in
2076 this case vs. the normal case isn't well defined. */
2077 if (code == DECL_EXPR
2078 && TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL
2079 && TREE_CODE (TREE_TYPE (DECL_EXPR_DECL (*tp))) != ERROR_MARK)
2081 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
2083 /* Call the function for the type. See if it returns anything or
2084 doesn't want us to continue. If we are to continue, walk both
2085 the normal fields and those for the declaration case. */
2086 result = (*func) (type_p, &walk_subtrees, data);
2087 if (result || !walk_subtrees)
2088 return NULL_TREE;
2090 result = walk_type_fields (*type_p, func, data, pset);
2091 if (result)
2092 return result;
2094 WALK_SUBTREE (TYPE_SIZE (*type_p));
2095 WALK_SUBTREE (TYPE_SIZE_UNIT (*type_p));
2097 /* If this is a record type, also walk the fields. */
2098 if (TREE_CODE (*type_p) == RECORD_TYPE
2099 || TREE_CODE (*type_p) == UNION_TYPE
2100 || TREE_CODE (*type_p) == QUAL_UNION_TYPE)
2102 tree field;
2104 for (field = TYPE_FIELDS (*type_p); field;
2105 field = TREE_CHAIN (field))
2107 /* We'd like to look at the type of the field, but we can easily
2108 get infinite recursion. So assume it's pointed to elsewhere
2109 in the tree. Also, ignore things that aren't fields. */
2110 if (TREE_CODE (field) != FIELD_DECL)
2111 continue;
2113 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
2114 WALK_SUBTREE (DECL_SIZE (field));
2115 WALK_SUBTREE (DECL_SIZE_UNIT (field));
2116 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
2117 WALK_SUBTREE (DECL_QUALIFIER (field));
2122 else if (code != SAVE_EXPR
2123 && code != BIND_EXPR
2124 && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2126 int i, len;
2128 /* Walk over all the sub-trees of this operand. */
2129 len = TREE_CODE_LENGTH (code);
2130 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
2131 But, we only want to walk them once. */
2132 if (code == TARGET_EXPR
2133 && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
2134 --len;
2136 /* Go through the subtrees. We need to do this in forward order so
2137 that the scope of a FOR_EXPR is handled properly. */
2138 #ifdef DEBUG_WALK_TREE
2139 for (i = 0; i < len; ++i)
2140 WALK_SUBTREE (TREE_OPERAND (*tp, i));
2141 #else
2142 for (i = 0; i < len - 1; ++i)
2143 WALK_SUBTREE (TREE_OPERAND (*tp, i));
2145 if (len)
2147 /* The common case is that we may tail recurse here. */
2148 if (code != BIND_EXPR
2149 && !TREE_CHAIN (*tp))
2150 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
2151 else
2152 WALK_SUBTREE (TREE_OPERAND (*tp, len - 1));
2154 #endif
2157 /* If this is a type, walk the needed fields in the type. */
2158 else if (TYPE_P (*tp))
2160 result = walk_type_fields (*tp, func, data, pset);
2161 if (result)
2162 return result;
2164 else
2166 /* Not one of the easy cases. We must explicitly go through the
2167 children. */
2168 switch (code)
2170 case ERROR_MARK:
2171 case IDENTIFIER_NODE:
2172 case INTEGER_CST:
2173 case REAL_CST:
2174 case VECTOR_CST:
2175 case STRING_CST:
2176 case BLOCK:
2177 case PLACEHOLDER_EXPR:
2178 case SSA_NAME:
2179 case FIELD_DECL:
2180 case RESULT_DECL:
2181 /* None of these have subtrees other than those already walked
2182 above. */
2183 break;
2185 case TREE_LIST:
2186 WALK_SUBTREE (TREE_VALUE (*tp));
2187 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
2188 break;
2190 case TREE_VEC:
2192 int len = TREE_VEC_LENGTH (*tp);
2194 if (len == 0)
2195 break;
2197 /* Walk all elements but the first. */
2198 while (--len)
2199 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
2201 /* Now walk the first one as a tail call. */
2202 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
2205 case COMPLEX_CST:
2206 WALK_SUBTREE (TREE_REALPART (*tp));
2207 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
2209 case CONSTRUCTOR:
2210 WALK_SUBTREE_TAIL (CONSTRUCTOR_ELTS (*tp));
2212 case SAVE_EXPR:
2213 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
2215 case BIND_EXPR:
2217 tree decl;
2218 for (decl = BIND_EXPR_VARS (*tp); decl; decl = TREE_CHAIN (decl))
2220 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
2221 into declarations that are just mentioned, rather than
2222 declared; they don't really belong to this part of the tree.
2223 And, we can see cycles: the initializer for a declaration
2224 can refer to the declaration itself. */
2225 WALK_SUBTREE (DECL_INITIAL (decl));
2226 WALK_SUBTREE (DECL_SIZE (decl));
2227 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
2229 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
2232 case STATEMENT_LIST:
2234 tree_stmt_iterator i;
2235 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
2236 WALK_SUBTREE (*tsi_stmt_ptr (i));
2238 break;
2240 default:
2241 /* ??? This could be a language-defined node. We really should make
2242 a hook for it, but right now just ignore it. */
2243 break;
2247 /* We didn't find what we were looking for. */
2248 return NULL_TREE;
2250 #undef WALK_SUBTREE
2251 #undef WALK_SUBTREE_TAIL
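/* Usage sketch (illustrative only; this callback is hypothetical): count
   the CALL_EXPRs reachable from a tree.  DATA points to an int counter.
   Returning NULL_TREE keeps the walk going; setting *WALK_SUBTREES to 0
   would prune everything below the current node.  */

static tree
count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (TREE_CODE (*tp) == CALL_EXPR)
    ++*(int *) data;
  return NULL_TREE;
}

/* A caller would do something like:

     int count = 0;
     walk_tree (&DECL_SAVED_TREE (fndecl), count_calls_r, &count, NULL);  */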
2254 /* Like walk_tree, but does not walk duplicate nodes more than once. */
2256 tree
2257 walk_tree_without_duplicates (tree *tp, walk_tree_fn func, void *data)
2259 tree result;
2260 struct pointer_set_t *pset;
2262 pset = pointer_set_create ();
2263 result = walk_tree (tp, func, data, pset);
2264 pointer_set_destroy (pset);
2265 return result;
2268 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2270 tree
2271 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2273 enum tree_code code = TREE_CODE (*tp);
2275 /* We make copies of most nodes. */
2276 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
2277 || code == TREE_LIST
2278 || code == TREE_VEC
2279 || code == TYPE_DECL)
2281 /* Because the chain gets clobbered when we make a copy, we save it
2282 here. */
2283 tree chain = TREE_CHAIN (*tp);
2284 tree new;
2286 /* Copy the node. */
2287 new = copy_node (*tp);
2289 /* Propagate mudflap marked-ness. */
2290 if (flag_mudflap && mf_marked_p (*tp))
2291 mf_mark (new);
2293 *tp = new;
2295 /* Now, restore the chain, if appropriate. That will cause
2296 walk_tree to walk into the chain as well. */
2297 if (code == PARM_DECL || code == TREE_LIST)
2298 TREE_CHAIN (*tp) = chain;
2300 /* For now, we don't update BLOCKs when we make copies. So, we
2301 have to nullify all BIND_EXPRs. */
2302 if (TREE_CODE (*tp) == BIND_EXPR)
2303 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2306 else if (TREE_CODE_CLASS (code) == tcc_type)
2307 *walk_subtrees = 0;
2308 else if (TREE_CODE_CLASS (code) == tcc_declaration)
2309 *walk_subtrees = 0;
2310 else if (TREE_CODE_CLASS (code) == tcc_constant)
2311 *walk_subtrees = 0;
2312 else
2313 gcc_assert (code != STATEMENT_LIST);
2314 return NULL_TREE;
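/* Usage sketch (illustrative only; this helper is hypothetical): unshare an
   expression by walking it with copy_tree_r.  Because copy_tree_r replaces
   *TP with the copy, we walk through a separate pointer so EXPR itself is
   left untouched.  Decls, types and constants are shared, not copied.  */

static tree
copy_expr_example (tree expr)
{
  tree copy = expr;

  walk_tree (&copy, copy_tree_r, NULL, NULL);
  return copy;
}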
2317 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2318 information indicating to what new SAVE_EXPR this one should be mapped,
2319 use that one. Otherwise, create a new node and enter it in ST. */
2321 static void
2322 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2324 splay_tree st = (splay_tree) st_;
2325 splay_tree_node n;
2326 tree t;
2328 /* See if we already encountered this SAVE_EXPR. */
2329 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2331 /* If we didn't already remap this SAVE_EXPR, do so now. */
2332 if (!n)
2334 t = copy_node (*tp);
2336 /* Remember this SAVE_EXPR. */
2337 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2338 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2339 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2341 else
2343 /* We've already walked into this SAVE_EXPR; don't do it again. */
2344 *walk_subtrees = 0;
2345 t = (tree) n->value;
2348 /* Replace this SAVE_EXPR with the copy. */
2349 *tp = t;
2352 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
2353 copies the label declaration and enters it in the splay_tree in DATA
2354 (which is really an `inline_data *'). */
2356 static tree
2357 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
2358 void *data)
2360 inline_data *id = (inline_data *) data;
2362 /* Don't walk into types. */
2363 if (TYPE_P (*tp))
2364 *walk_subtrees = 0;
2366 else if (TREE_CODE (*tp) == LABEL_EXPR)
2368 tree decl = TREE_OPERAND (*tp, 0);
2370 /* Copy the decl and remember the copy. */
2371 insert_decl_map (id, decl,
2372 copy_decl_for_inlining (decl, DECL_CONTEXT (decl),
2373 DECL_CONTEXT (decl)));
2376 return NULL_TREE;
2379 /* Perform any modifications to EXPR required when it is unsaved. Does
2380 not recurse into EXPR's subtrees. */
2382 static void
2383 unsave_expr_1 (tree expr)
2385 switch (TREE_CODE (expr))
2387 case TARGET_EXPR:
2388 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
2389 It's OK for this to happen if it was part of a subtree that
2390 isn't immediately expanded, such as operand 2 of another
2391 TARGET_EXPR. */
2392 if (TREE_OPERAND (expr, 1))
2393 break;
2395 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
2396 TREE_OPERAND (expr, 3) = NULL_TREE;
2397 break;
2399 default:
2400 break;
2404 /* Called via walk_tree when an expression is unsaved. Using the
2405 splay_tree in DATA (which is really an `inline_data *'),
2406 remaps all local declarations to appropriate replacements. */
2408 static tree
2409 unsave_r (tree *tp, int *walk_subtrees, void *data)
2411 inline_data *id = (inline_data *) data;
2412 splay_tree st = id->decl_map;
2413 splay_tree_node n;
2415 /* Only a local declaration (variable or label). */
2416 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
2417 || TREE_CODE (*tp) == LABEL_DECL)
2419 /* Lookup the declaration. */
2420 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2422 /* If it's there, remap it. */
2423 if (n)
2424 *tp = (tree) n->value;
2427 else if (TREE_CODE (*tp) == STATEMENT_LIST)
2428 copy_statement_list (tp);
2429 else if (TREE_CODE (*tp) == BIND_EXPR)
2430 copy_bind_expr (tp, walk_subtrees, id);
2431 else if (TREE_CODE (*tp) == SAVE_EXPR)
2432 remap_save_expr (tp, st, walk_subtrees);
2433 else
2435 copy_tree_r (tp, walk_subtrees, NULL);
2437 /* Do whatever unsaving is required. */
2438 unsave_expr_1 (*tp);
2441 /* Keep iterating. */
2442 return NULL_TREE;
2445 /* Copies everything in EXPR and replaces variables, labels
2446 and SAVE_EXPRs local to EXPR. */
2448 tree
2449 unsave_expr_now (tree expr)
2451 inline_data id;
2453 /* There's nothing to do for NULL_TREE. */
2454 if (expr == 0)
2455 return expr;
2457 /* Set up ID. */
2458 memset (&id, 0, sizeof (id));
2459 VARRAY_TREE_INIT (id.fns, 1, "fns");
2460 VARRAY_PUSH_TREE (id.fns, current_function_decl);
2461 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2463 /* Walk the tree once to find local labels. */
2464 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
2466 /* Walk the tree again, copying, remapping, and unsaving. */
2467 walk_tree (&expr, unsave_r, &id, NULL);
2469 /* Clean up. */
2470 splay_tree_delete (id.decl_map);
2472 return expr;
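/* Usage sketch (illustrative only; this helper is hypothetical): make two
   independent copies of a cleanup expression.  Each call remaps SAVE_EXPRs
   and local labels, so neither copy shares that state with the other or
   with the original.  */

static void
duplicate_cleanup_example (tree cleanup, tree *first, tree *second)
{
  *first = unsave_expr_now (cleanup);
  *second = unsave_expr_now (cleanup);
}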
2475 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
2477 static tree
2478 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
2480 if (*tp == data)
2481 return (tree) data;
2482 else
2483 return NULL;
2486 bool
2487 debug_find_tree (tree top, tree search)
2489 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
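/* Illustrative gdb usage (the variable names are hypothetical):

     (gdb) call debug_find_tree (body, suspect)

   returns true when SUSPECT occurs somewhere within BODY.  */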
2492 /* Declare the variables created by the inliner. Add all the variables in
2493 VARS to BIND_EXPR. */
2495 static void
2496 declare_inline_vars (tree bind_expr, tree vars)
2498 tree t;
2499 for (t = vars; t; t = TREE_CHAIN (t))
2500 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
2502 add_var_to_bind_expr (bind_expr, vars);