gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "integrate.h"
36 #include "varray.h"
37 #include "hashtab.h"
38 #include "pointer-set.h"
39 #include "splay-tree.h"
40 #include "langhooks.h"
41 #include "cgraph.h"
42 #include "intl.h"
43 #include "tree-mudflap.h"
44 #include "tree-flow.h"
45 #include "function.h"
46 #include "diagnostic.h"
47 #include "debug.h"
49 /* I'm not real happy about this, but we need to handle gimple and
50 non-gimple trees. */
51 #include "tree-iterator.h"
52 #include "tree-gimple.h"
54 /* 0 if we should not perform inlining.
55 1 if we should expand functions calls inline at the tree level.
56 2 if we should consider *all* functions to be inline
57 candidates. */
59 int flag_inline_trees = 0;
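/* (The concrete mapping from command-line options to these values lives in
   the front ends and toplev; a value of 2 presumably corresponds to
   -finline-functions-style behavior where every function is a candidate.)  */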
61 /* To Do:
63 o In order to make inlining-on-trees work, we pessimized
64 function-local static constants. In particular, they are now
65 always output, even when not addressed. Fix this by treating
66 function-local static constants just like global static
67 constants; the back-end already knows not to output them if they
68 are not needed.
70 o Provide heuristics to clamp inlining of recursive template
71 calls? */
73 /* Data required for function inlining. */
75 typedef struct inline_data
77 /* A stack of the functions we are inlining. For example, if we are
78 compiling `f', which calls `g', which calls `h', and we are
79 inlining the body of `h', the stack will contain, `h', followed
80 by `g', followed by `f'. The first few elements of the stack may
81 contain other functions that we know we should not recurse into,
82 even though they are not directly being inlined. */
83 varray_type fns;
84 /* The index of the first element of FNS that really represents an
85 inlined function. */
86 unsigned first_inlined_fn;
87 /* The label to jump to when a return statement is encountered. If
88 this value is NULL, then return statements will simply be
89 remapped as return statements, rather than as jumps. */
90 tree ret_label;
91 /* The VAR_DECL for the return value. */
92 tree retvar;
93 /* The map from local declarations in the inlined function to
94 equivalents in the function into which it is being inlined. */
95 splay_tree decl_map;
96 /* Nonzero if we are currently within the cleanup for a
97 TARGET_EXPR. */
98 int in_target_cleanup_p;
99 /* We use the same mechanism to build clones that we do to perform
100 inlining. However, there are a few places where we need to
101 distinguish between those two situations. This flag is true if
102 we are cloning, rather than inlining. */
103 bool cloning_p;
104 /* Similarly for saving function body. */
105 bool saving_p;
106 /* Hash table used to prevent walk_tree from visiting the same node
107 umpteen million times. */
108 htab_t tree_pruner;
109 /* Callgraph node of function we are inlining into. */
110 struct cgraph_node *node;
111 /* Callgraph node of currently inlined function. */
112 struct cgraph_node *current_node;
113 /* Statement iterator. We need this so we can keep the tree in
114 gimple form when we insert the inlined function. It is not
115 used when we are not dealing with gimple trees. */
116 tree_stmt_iterator tsi;
117 } inline_data;
119 /* Prototypes. */
121 /* The approximate number of instructions per statement. This number
122 need not be particularly accurate; it is used only to make
123 decisions about when a function is too big to inline. */
124 #define INSNS_PER_STMT (10)
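/* For example, with this scale a limit of 500 "instructions" in the inlining
   parameters corresponds to a body of roughly 50 statements.  */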
126 static tree copy_body_r (tree *, int *, void *);
127 static tree copy_body (inline_data *);
128 static tree expand_call_inline (tree *, int *, void *);
129 static void expand_calls_inline (tree *, inline_data *);
130 static bool inlinable_function_p (tree);
131 static tree remap_decl (tree, inline_data *);
132 static tree remap_type (tree, inline_data *);
133 static tree initialize_inlined_parameters (inline_data *, tree,
134 tree, tree, tree);
135 static void remap_block (tree *, inline_data *);
136 static tree remap_decls (tree, inline_data *);
137 static void copy_bind_expr (tree *, int *, inline_data *);
138 static tree mark_local_for_remap_r (tree *, int *, void *);
139 static void unsave_expr_1 (tree);
140 static tree unsave_r (tree *, int *, void *);
141 static void declare_inline_vars (tree bind_expr, tree vars);
142 static void remap_save_expr (tree *, void *, int *);
144 /* Insert a tree->tree mapping for ID. Although the name suggests
145 that the trees should be variables, it is used for more than that. */
147 static void
148 insert_decl_map (inline_data *id, tree key, tree value)
150 splay_tree_insert (id->decl_map, (splay_tree_key) key,
151 (splay_tree_value) value);
153 /* Always insert an identity map as well. If we see this same new
154 node again, we won't want to duplicate it a second time. */
155 if (key != value)
156 splay_tree_insert (id->decl_map, (splay_tree_key) value,
157 (splay_tree_value) value);
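/* For example, once a local VAR_DECL `a' has been remapped to its copy `a1',
   the table holds both a -> a1 and a1 -> a1, so encountering `a1' again
   during the walk leaves it untouched.  */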
160 /* Remap DECL during the copying of the BLOCK tree for the function.
161 We are only called to remap local variables in the current function. */
163 static tree
164 remap_decl (tree decl, inline_data *id)
166 splay_tree_node n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
167 tree fn = VARRAY_TOP_TREE (id->fns);
169 /* See if we have remapped this declaration. If we didn't already have an
170 equivalent for this declaration, create one now. */
171 if (!n)
173 /* Make a copy of the variable or label. */
174 tree t = copy_decl_for_inlining (decl, fn, VARRAY_TREE (id->fns, 0));
176 /* Remap types, if necessary. */
177 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
178 if (TREE_CODE (t) == TYPE_DECL)
179 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
180 else if (TREE_CODE (t) == PARM_DECL)
181 DECL_ARG_TYPE_AS_WRITTEN (t)
182 = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);
184 /* Remap sizes as necessary. */
185 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
186 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
188 /* If fields, do likewise for offset and qualifier. */
189 if (TREE_CODE (t) == FIELD_DECL)
191 walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
192 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
193 walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
196 #if 0
197 /* FIXME handle anon aggrs. */
198 if (! DECL_NAME (t) && TREE_TYPE (t)
199 && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
201 /* For a VAR_DECL of anonymous type, we must also copy the
202 member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS. */
203 tree members = NULL;
204 tree src;
206 for (src = DECL_ANON_UNION_ELEMS (t); src;
207 src = TREE_CHAIN (src))
209 tree member = remap_decl (TREE_VALUE (src), id);
211 gcc_assert (!TREE_PURPOSE (src));
212 members = tree_cons (NULL, member, members);
214 DECL_ANON_UNION_ELEMS (t) = nreverse (members);
216 #endif
218 /* Remember it, so that if we encounter this local entity
219 again we can reuse this copy. */
220 insert_decl_map (id, decl, t);
221 return t;
224 return unshare_expr ((tree) n->value);
227 static tree
228 remap_type (tree type, inline_data *id)
230 splay_tree_node node;
231 tree new, t;
233 if (type == NULL)
234 return type;
236 /* See if we have remapped this type. */
237 node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
238 if (node)
239 return (tree) node->value;
241 /* The type only needs remapping if it's variably modified by a variable
242 in the function we are inlining. */
243 if (! variably_modified_type_p (type, VARRAY_TOP_TREE (id->fns)))
245 insert_decl_map (id, type, type);
246 return type;
249 /* We do need a copy. Build and register it now. If this is a pointer or
250 reference type, remap the designated type and make a new pointer or
251 reference type. */
252 if (TREE_CODE (type) == POINTER_TYPE)
254 new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
255 TYPE_MODE (type),
256 TYPE_REF_CAN_ALIAS_ALL (type));
257 insert_decl_map (id, type, new);
258 return new;
260 else if (TREE_CODE (type) == REFERENCE_TYPE)
262 new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
263 TYPE_MODE (type),
264 TYPE_REF_CAN_ALIAS_ALL (type));
265 insert_decl_map (id, type, new);
266 return new;
268 else
269 new = copy_node (type);
271 insert_decl_map (id, type, new);
273 /* This is a new type, not a copy of an old type. Need to reassociate
274 variants. We can handle everything except the main variant lazily. */
275 t = TYPE_MAIN_VARIANT (type);
276 if (type != t)
278 t = remap_type (t, id);
279 TYPE_MAIN_VARIANT (new) = t;
280 TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
281 TYPE_NEXT_VARIANT (t) = new;
283 else
285 TYPE_MAIN_VARIANT (new) = new;
286 TYPE_NEXT_VARIANT (new) = NULL;
289 /* Lazily create pointer and reference types. */
290 TYPE_POINTER_TO (new) = NULL;
291 TYPE_REFERENCE_TO (new) = NULL;
293 switch (TREE_CODE (new))
295 case INTEGER_TYPE:
296 case REAL_TYPE:
297 case ENUMERAL_TYPE:
298 case BOOLEAN_TYPE:
299 case CHAR_TYPE:
300 t = TYPE_MIN_VALUE (new);
301 if (t && TREE_CODE (t) != INTEGER_CST)
302 walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
304 t = TYPE_MAX_VALUE (new);
305 if (t && TREE_CODE (t) != INTEGER_CST)
306 walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
307 return new;
309 case FUNCTION_TYPE:
310 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
311 walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
312 return new;
314 case ARRAY_TYPE:
315 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
316 TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
317 break;
319 case RECORD_TYPE:
320 case UNION_TYPE:
321 case QUAL_UNION_TYPE:
322 walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
323 break;
325 case FILE_TYPE:
326 case OFFSET_TYPE:
327 default:
328 /* Shouldn't have been thought variable sized. */
329 gcc_unreachable ();
332 walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
333 walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
335 return new;
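/* At this point NEW has been entered in the map, hooked into the variant
   chain of its (remapped) main variant or made a main variant itself, and
   its TYPE_SIZE / TYPE_SIZE_UNIT expressions copied via copy_body_r.  */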
338 static tree
339 remap_decls (tree decls, inline_data *id)
341 tree old_var;
342 tree new_decls = NULL_TREE;
344 /* Remap its variables. */
345 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
347 tree new_var;
349 /* Remap the variable. */
350 new_var = remap_decl (old_var, id);
352 /* If we didn't remap this variable, we can't mess with its
353 TREE_CHAIN. If we remapped this variable to the return slot, it's
354 already declared somewhere else, so don't declare it here. */
355 if (!new_var || new_var == id->retvar)
357 else
359 gcc_assert (DECL_P (new_var));
360 TREE_CHAIN (new_var) = new_decls;
361 new_decls = new_var;
365 return nreverse (new_decls);
368 /* Copy the BLOCK to contain remapped versions of the variables
369 therein. And hook the new block into the block-tree. */
371 static void
372 remap_block (tree *block, inline_data *id)
374 tree old_block;
375 tree new_block;
376 tree fn;
378 /* Make the new block. */
379 old_block = *block;
380 new_block = make_node (BLOCK);
381 TREE_USED (new_block) = TREE_USED (old_block);
382 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
383 *block = new_block;
385 /* Remap its variables. */
386 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
388 fn = VARRAY_TREE (id->fns, 0);
389 #if 1
390 /* FIXME! It shouldn't be so hard to manage blocks. Rebuilding them in
391 rest_of_compilation is a good start. */
392 if (id->cloning_p)
393 /* We're building a clone; DECL_INITIAL is still
394 error_mark_node, and current_binding_level is the parm
395 binding level. */
396 lang_hooks.decls.insert_block (new_block);
397 else
399 /* Attach this new block after the DECL_INITIAL block for the
400 function into which this block is being inlined. In
401 rest_of_compilation we will straighten out the BLOCK tree. */
402 tree *first_block;
403 if (DECL_INITIAL (fn))
404 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
405 else
406 first_block = &DECL_INITIAL (fn);
407 BLOCK_CHAIN (new_block) = *first_block;
408 *first_block = new_block;
410 #endif
411 /* Remember the remapped block. */
412 insert_decl_map (id, old_block, new_block);
415 static void
416 copy_statement_list (tree *tp)
418 tree_stmt_iterator oi, ni;
419 tree new;
421 new = alloc_stmt_list ();
422 ni = tsi_start (new);
423 oi = tsi_start (*tp);
424 *tp = new;
426 for (; !tsi_end_p (oi); tsi_next (&oi))
427 tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
430 static void
431 copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
433 tree block = BIND_EXPR_BLOCK (*tp);
434 /* Copy (and replace) the statement. */
435 copy_tree_r (tp, walk_subtrees, NULL);
436 if (block)
438 remap_block (&block, id);
439 BIND_EXPR_BLOCK (*tp) = block;
442 if (BIND_EXPR_VARS (*tp))
443 /* This will remap a lot of the same decls again, but this should be
444 harmless. */
445 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
448 /* Called from copy_body via walk_tree. DATA is really an `inline_data *'. */
450 static tree
451 copy_body_r (tree *tp, int *walk_subtrees, void *data)
453 inline_data *id = (inline_data *) data;
454 tree fn = VARRAY_TOP_TREE (id->fns);
456 #if 0
457 /* All automatic variables should have a DECL_CONTEXT indicating
458 what function they come from. */
459 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
460 && DECL_NAMESPACE_SCOPE_P (*tp))
461 gcc_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp));
462 #endif
464 /* If this is a RETURN_EXPR, change it into a MODIFY_EXPR and a
465 GOTO_EXPR with the RET_LABEL as its target. */
466 if (TREE_CODE (*tp) == RETURN_EXPR && id->ret_label)
468 tree return_stmt = *tp;
469 tree goto_stmt;
471 /* Build the GOTO_EXPR. */
472 tree assignment = TREE_OPERAND (return_stmt, 0);
473 goto_stmt = build1 (GOTO_EXPR, void_type_node, id->ret_label);
474 TREE_USED (id->ret_label) = 1;
476 /* If we're returning something, just turn that into an
477 assignment into the equivalent of the original
478 RESULT_DECL. */
479 if (assignment)
481 /* Do not create a statement containing a naked RESULT_DECL. */
482 if (TREE_CODE (assignment) == RESULT_DECL)
483 gimplify_stmt (&assignment);
485 *tp = build (BIND_EXPR, void_type_node, NULL, NULL, NULL);
486 append_to_statement_list (assignment, &BIND_EXPR_BODY (*tp));
487 append_to_statement_list (goto_stmt, &BIND_EXPR_BODY (*tp));
489 /* If we're not returning anything just do the jump. */
490 else
491 *tp = goto_stmt;
493 /* Local variables and labels need to be replaced by equivalent
494 variables. We don't want to copy static variables; there's only
495 one of those, no matter how many times we inline the containing
496 function. Similarly for globals from an outer function. */
497 else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
499 tree new_decl;
501 /* Remap the declaration. */
502 new_decl = remap_decl (*tp, id);
503 gcc_assert (new_decl);
504 /* Replace this variable with the copy. */
505 STRIP_TYPE_NOPS (new_decl);
506 *tp = new_decl;
507 *walk_subtrees = 0;
509 else if (TREE_CODE (*tp) == STATEMENT_LIST)
510 copy_statement_list (tp);
511 else if (TREE_CODE (*tp) == SAVE_EXPR)
512 remap_save_expr (tp, id->decl_map, walk_subtrees);
513 else if (TREE_CODE (*tp) == BIND_EXPR)
514 copy_bind_expr (tp, walk_subtrees, id);
515 /* Types may need remapping as well. */
516 else if (TYPE_P (*tp))
517 *tp = remap_type (*tp, id);
519 /* If this is a constant, we have to copy the node iff the type will be
520 remapped. copy_tree_r will not copy a constant. */
521 else if (TREE_CODE_CLASS (TREE_CODE (*tp)) == tcc_constant)
523 tree new_type = remap_type (TREE_TYPE (*tp), id);
525 if (new_type == TREE_TYPE (*tp))
526 *walk_subtrees = 0;
528 else if (TREE_CODE (*tp) == INTEGER_CST)
529 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
530 TREE_INT_CST_HIGH (*tp));
531 else
533 *tp = copy_node (*tp);
534 TREE_TYPE (*tp) = new_type;
538 /* Otherwise, just copy the node. Note that copy_tree_r already
539 knows not to copy VAR_DECLs, etc., so this is safe. */
540 else
542 tree old_node = *tp;
544 if (TREE_CODE (*tp) == MODIFY_EXPR
545 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
546 && (lang_hooks.tree_inlining.auto_var_in_fn_p
547 (TREE_OPERAND (*tp, 0), fn)))
549 /* Some assignments VAR = VAR; don't generate any rtl code
550 and thus don't count as variable modification. Avoid
551 keeping bogosities like 0 = 0. */
552 tree decl = TREE_OPERAND (*tp, 0), value;
553 splay_tree_node n;
555 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
556 if (n)
558 value = (tree) n->value;
559 STRIP_TYPE_NOPS (value);
560 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
562 *tp = build_empty_stmt ();
563 return copy_body_r (tp, walk_subtrees, data);
567 else if (TREE_CODE (*tp) == INDIRECT_REF)
569 /* Get rid of *& from inline substitutions that can happen when a
570 pointer argument is an ADDR_EXPR. */
571 tree decl = TREE_OPERAND (*tp, 0), value;
572 splay_tree_node n;
574 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
575 if (n)
577 value = (tree) n->value;
578 STRIP_NOPS (value);
579 if (TREE_CODE (value) == ADDR_EXPR
580 && (lang_hooks.types_compatible_p
581 (TREE_TYPE (*tp), TREE_TYPE (TREE_OPERAND (value, 0)))))
583 *tp = TREE_OPERAND (value, 0);
584 return copy_body_r (tp, walk_subtrees, data);
589 copy_tree_r (tp, walk_subtrees, NULL);
591 if (TREE_CODE (*tp) == CALL_EXPR && id->node && get_callee_fndecl (*tp))
593 if (id->saving_p)
595 struct cgraph_node *node;
596 struct cgraph_edge *edge;
598 for (node = id->node->next_clone; node; node = node->next_clone)
600 edge = cgraph_edge (node, old_node);
601 gcc_assert (edge);
602 edge->call_expr = *tp;
605 else
607 struct cgraph_edge *edge
608 = cgraph_edge (id->current_node, old_node);
610 if (edge)
611 cgraph_clone_edge (edge, id->node, *tp);
615 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
617 /* The copied TARGET_EXPR has never been expanded, even if the
618 original node was expanded already. */
619 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
621 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
622 TREE_OPERAND (*tp, 3) = NULL_TREE;
625 /* Variable substitution need not be simple; consider, in particular, the
626 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
627 and friends are up-to-date. */
628 else if (TREE_CODE (*tp) == ADDR_EXPR)
630 walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
631 recompute_tree_invarant_for_addr_expr (*tp);
632 *walk_subtrees = 0;
636 /* Keep iterating. */
637 return NULL_TREE;
640 /* Make a copy of the body of FN so that it can be inserted inline in
641 another function. */
643 static tree
644 copy_body (inline_data *id)
646 tree body;
647 tree fndecl = VARRAY_TOP_TREE (id->fns);
649 if (fndecl == current_function_decl
650 && cfun->saved_tree)
651 body = cfun->saved_tree;
652 else
653 body = DECL_SAVED_TREE (fndecl);
654 walk_tree (&body, copy_body_r, id, NULL);
656 return body;
659 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
660 defined in function FN, or of a data member thereof. */
662 static bool
663 self_inlining_addr_expr (tree value, tree fn)
665 tree var;
667 if (TREE_CODE (value) != ADDR_EXPR)
668 return false;
670 var = get_base_address (TREE_OPERAND (value, 0));
672 return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
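/* For example, when a function is inlined into itself, directly or through
   mutual recursion, an argument such as &some_local_of_fn must not be
   propagated as-is, because the object it points to is itself being
   remapped; the address of a global or of a caller-owned object is fine.  */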
675 static void
676 setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
677 tree *init_stmts, tree *vars, bool *gimplify_init_stmts_p)
679 tree init_stmt;
680 tree var;
682 /* If the parameter is never assigned to, we may not need to
683 create a new variable here at all. Instead, we may be able
684 to just use the argument value. */
685 if (TREE_READONLY (p)
686 && !TREE_ADDRESSABLE (p)
687 && value && !TREE_SIDE_EFFECTS (value))
689 /* We can't risk substituting complex expressions. They
690 might contain variables that will be assigned to later.
691 Theoretically, we could check the expression to see if
692 all of the variables that determine its value are
693 read-only, but we don't bother. */
694 /* We may produce non-gimple trees by adding NOPs or introduce
695 invalid sharing when the operand is not really constant.
696 It is not a big deal to prohibit constant propagation here, as
697 we will constant propagate in the DOM1 pass anyway. */
698 if (is_gimple_min_invariant (value)
699 && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
700 /* We have to be very careful about ADDR_EXPR. Make sure
701 the base variable isn't a local variable of the inlined
702 function, e.g., when doing recursive inlining, direct or
703 mutually-recursive or whatever, which is why we don't
704 just test whether fn == current_function_decl. */
705 && ! self_inlining_addr_expr (value, fn))
707 insert_decl_map (id, p, value);
708 return;
712 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
713 here since the type of this decl must be visible to the calling
714 function. */
715 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
717 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
718 that way, when the PARM_DECL is encountered, it will be
719 automatically replaced by the VAR_DECL. */
720 insert_decl_map (id, p, var);
722 /* Declare this new variable. */
723 TREE_CHAIN (var) = *vars;
724 *vars = var;
726 /* Make gimplifier happy about this variable. */
727 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
729 /* Even if P was TREE_READONLY, the new VAR should not be.
730 In the original code, we would have constructed a
731 temporary, and then the function body would have never
732 changed the value of P. However, now, we will be
733 constructing VAR directly. The constructor body may
734 change its value multiple times as it is being
735 constructed. Therefore, it must not be TREE_READONLY;
736 the back-end assumes that a TREE_READONLY variable is
737 assigned to only once. */
738 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
739 TREE_READONLY (var) = 0;
741 /* Initialize this VAR_DECL from the equivalent argument. Convert
742 the argument to the proper type in case it was promoted. */
743 if (value)
745 tree rhs = fold_convert (TREE_TYPE (var), value);
747 if (rhs == error_mark_node)
748 return;
750 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
751 keep our trees in gimple form. */
752 init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
753 append_to_statement_list (init_stmt, init_stmts);
755 /* If we did not create a gimple value and we did not create a gimple
756 cast of a gimple value, then we will need to gimplify INIT_STMTS
757 at the end. Note that is_gimple_cast only checks the outer
758 tree code, not its operand. Thus the explicit check that its
759 operand is a gimple value. */
760 if (!is_gimple_val (rhs)
761 && (!is_gimple_cast (rhs)
762 || !is_gimple_val (TREE_OPERAND (rhs, 0))))
763 *gimplify_init_stmts_p = true;
767 /* Generate code to initialize the parameters of the function at the
768 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
770 static tree
771 initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
772 tree fn, tree bind_expr)
774 tree init_stmts = NULL_TREE;
775 tree parms;
776 tree a;
777 tree p;
778 tree vars = NULL_TREE;
779 bool gimplify_init_stmts_p = false;
780 int argnum = 0;
782 /* Figure out what the parameters are. */
783 parms = DECL_ARGUMENTS (fn);
784 if (fn == current_function_decl)
785 parms = cfun->saved_args;
787 /* Loop through the parameter declarations, replacing each with an
788 equivalent VAR_DECL, appropriately initialized. */
789 for (p = parms, a = args; p;
790 a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
792 tree value;
794 ++argnum;
796 /* Find the initializer. */
797 value = lang_hooks.tree_inlining.convert_parm_for_inlining
798 (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);
800 setup_one_parameter (id, p, value, fn, &init_stmts, &vars,
801 &gimplify_init_stmts_p);
804 /* Evaluate trailing arguments. */
805 for (; a; a = TREE_CHAIN (a))
807 tree value = TREE_VALUE (a);
808 append_to_statement_list (value, &init_stmts);
811 /* Initialize the static chain. */
812 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
813 if (fn == current_function_decl)
814 p = DECL_STRUCT_FUNCTION (fn)->saved_static_chain_decl;
815 if (p)
817 /* No static chain? Seems like a bug in tree-nested.c. */
818 gcc_assert (static_chain);
820 setup_one_parameter (id, p, static_chain, fn, &init_stmts, &vars,
821 &gimplify_init_stmts_p);
824 if (gimplify_init_stmts_p)
825 gimplify_body (&init_stmts, current_function_decl, false);
827 declare_inline_vars (bind_expr, vars);
828 return init_stmts;
831 /* Declare a return variable to replace the RESULT_DECL for the function we
832 are calling. RETURN_SLOT_ADDR, if non-null, was a fake parameter that
833 took the address of the result. MODIFY_DEST, if non-null, was the LHS of
834 the MODIFY_EXPR to which this call is the RHS.
836 The return value is a (possibly null) value that is the result of the
837 function as seen by the callee. *USE_P is a (possibly null) value that
838 holds the result as seen by the caller. */
840 static tree
841 declare_return_variable (inline_data *id, tree return_slot_addr,
842 tree modify_dest, tree *use_p)
844 tree callee = VARRAY_TOP_TREE (id->fns);
845 tree caller = VARRAY_TREE (id->fns, 0);
846 tree result = DECL_RESULT (callee);
847 tree callee_type = TREE_TYPE (result);
848 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
849 tree var, use;
851 /* We don't need to do anything for functions that don't return
852 anything. */
853 if (!result || VOID_TYPE_P (callee_type))
855 *use_p = NULL_TREE;
856 return NULL_TREE;
859 /* If there was a return slot, then the return value is the
860 dereferenced address of that object. */
861 if (return_slot_addr)
863 /* The front end shouldn't have used both return_slot_addr and
864 a modify expression. */
865 gcc_assert (!modify_dest);
866 if (DECL_BY_REFERENCE (result))
867 var = return_slot_addr;
868 else
869 var = build_fold_indirect_ref (return_slot_addr);
870 use = NULL;
871 goto done;
874 /* All types requiring non-trivial constructors should have been handled. */
875 gcc_assert (!TREE_ADDRESSABLE (callee_type));
877 /* Attempt to avoid creating a new temporary variable. */
878 if (modify_dest)
880 bool use_it = false;
882 /* We can't use MODIFY_DEST if there's type promotion involved. */
883 if (!lang_hooks.types_compatible_p (caller_type, callee_type))
884 use_it = false;
886 /* ??? If we're assigning to a variable sized type, then we must
887 reuse the destination variable, because we've no good way to
888 create variable sized temporaries at this point. */
889 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
890 use_it = true;
892 /* If the callee cannot possibly modify MODIFY_DEST, then we can
893 reuse it as the result of the call directly. Don't do this if
894 it would promote MODIFY_DEST to addressable. */
895 else if (!TREE_STATIC (modify_dest)
896 && !TREE_ADDRESSABLE (modify_dest)
897 && !TREE_ADDRESSABLE (result))
898 use_it = true;
900 if (use_it)
902 var = modify_dest;
903 use = NULL;
904 goto done;
908 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
910 var = copy_decl_for_inlining (result, callee, caller);
911 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
912 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
913 = tree_cons (NULL_TREE, var,
914 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
916 /* Do not have the rest of GCC warn about this variable as it should
917 not be visible to the user. */
918 TREE_NO_WARNING (var) = 1;
920 /* Build the use expr. If the return type of the function was
921 promoted, convert it back to the expected type. */
922 use = var;
923 if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
924 use = fold_convert (caller_type, var);
926 done:
927 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
928 way, when the RESULT_DECL is encountered, it will be
929 automatically replaced by the VAR_DECL. */
930 insert_decl_map (id, result, var);
932 /* Remember this so we can ignore it in remap_decls. */
933 id->retvar = var;
935 *use_p = use;
936 return var;
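/* To summarize the cases above: with a return slot the callee's RESULT_DECL
   maps to the dereferenced slot (or to the address itself for
   DECL_BY_REFERENCE results); a safe MODIFY_DEST is reused directly;
   otherwise a fresh VAR_DECL is created in the caller, and *USE_P is what
   the caller substitutes for the value of the CALL_EXPR.  */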
939 /* Returns nonzero if a function can be inlined as a tree. */
941 bool
942 tree_inlinable_function_p (tree fn)
944 return inlinable_function_p (fn);
947 static const char *inline_forbidden_reason;
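/* The strings stored here contain %J and %qF directives, which is why the
   diagnostics in inlinable_function_p below pass FN twice, once for the
   location and once for the function name.  */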
949 static tree
950 inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
951 void *fnp)
953 tree node = *nodep;
954 tree fn = (tree) fnp;
955 tree t;
957 switch (TREE_CODE (node))
959 case CALL_EXPR:
960 /* Refuse to inline an alloca call unless the user explicitly forced it, as
961 this may change the program's memory overhead drastically when the
962 function using alloca is called in a loop. For the GCC present in
963 SPEC2000, inlining into schedule_block caused it to require 2GB of
964 RAM instead of 256MB. */
965 if (alloca_call_p (node)
966 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
968 inline_forbidden_reason
969 = N_("%Jfunction %qF can never be inlined because it uses "
970 "alloca (override using the always_inline attribute)");
971 return node;
973 t = get_callee_fndecl (node);
974 if (! t)
975 break;
977 /* We cannot inline functions that call setjmp. */
978 if (setjmp_call_p (t))
980 inline_forbidden_reason
981 = N_("%Jfunction %qF can never be inlined because it uses setjmp");
982 return node;
985 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
986 switch (DECL_FUNCTION_CODE (t))
988 /* We cannot inline functions that take a variable number of
989 arguments. */
990 case BUILT_IN_VA_START:
991 case BUILT_IN_STDARG_START:
992 case BUILT_IN_NEXT_ARG:
993 case BUILT_IN_VA_END:
994 inline_forbidden_reason
995 = N_("%Jfunction %qF can never be inlined because it "
996 "uses variable argument lists");
997 return node;
999 case BUILT_IN_LONGJMP:
1000 /* We can't inline functions that call __builtin_longjmp at
1001 all. The non-local goto machinery really requires the
1002 destination be in a different function. If we allow the
1003 function calling __builtin_longjmp to be inlined into the
1004 function calling __builtin_setjmp, Things will Go Awry. */
1005 inline_forbidden_reason
1006 = N_("%Jfunction %qF can never be inlined because "
1007 "it uses setjmp-longjmp exception handling");
1008 return node;
1010 case BUILT_IN_NONLOCAL_GOTO:
1011 /* Similarly. */
1012 inline_forbidden_reason
1013 = N_("%Jfunction %qF can never be inlined because "
1014 "it uses non-local goto");
1015 return node;
1017 default:
1018 break;
1020 break;
1022 case GOTO_EXPR:
1023 t = TREE_OPERAND (node, 0);
1025 /* We will not inline a function which uses computed goto. The
1026 addresses of its local labels, which may be tucked into
1027 global storage, are of course not constant across
1028 instantiations, which causes unexpected behavior. */
1029 if (TREE_CODE (t) != LABEL_DECL)
1031 inline_forbidden_reason
1032 = N_("%Jfunction %qF can never be inlined "
1033 "because it contains a computed goto");
1034 return node;
1036 break;
1038 case LABEL_EXPR:
1039 t = TREE_OPERAND (node, 0);
1040 if (DECL_NONLOCAL (t))
1042 /* We cannot inline a function that receives a non-local goto
1043 because we cannot remap the destination label used in the
1044 function that is performing the non-local goto. */
1045 inline_forbidden_reason
1046 = N_("%Jfunction %qF can never be inlined "
1047 "because it receives a non-local goto");
1048 return node;
1050 break;
1052 case RECORD_TYPE:
1053 case UNION_TYPE:
1054 /* We cannot inline a function of the form
1056 void F (int i) { struct S { int ar[i]; } s; }
1058 Attempting to do so produces a catch-22.
1059 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1060 UNION_TYPE nodes, then it goes into infinite recursion on a
1061 structure containing a pointer to its own type. If it doesn't,
1062 then the type node for S doesn't get adjusted properly when
1063 F is inlined, and we abort in find_function_data.
1065 ??? This is likely no longer true, but it's too late in the 4.0
1066 cycle to try to find out. This should be checked for 4.1. */
1067 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1068 if (variably_modified_type_p (TREE_TYPE (t), NULL))
1070 inline_forbidden_reason
1071 = N_("%Jfunction %qF can never be inlined "
1072 "because it uses variable sized variables");
1073 return node;
1076 default:
1077 break;
1080 return NULL_TREE;
1083 /* Return a subexpression representing a possible alloca call, if any. */
1084 static tree
1085 inline_forbidden_p (tree fndecl)
1087 location_t saved_loc = input_location;
1088 tree ret = walk_tree_without_duplicates (&DECL_SAVED_TREE (fndecl),
1089 inline_forbidden_p_1, fndecl);
1091 input_location = saved_loc;
1092 return ret;
1095 /* Returns nonzero if FN is a function that does not have any
1096 fundamental inline blocking properties. */
1098 static bool
1099 inlinable_function_p (tree fn)
1101 bool inlinable = true;
1103 /* If we've already decided this function shouldn't be inlined,
1104 there's no need to check again. */
1105 if (DECL_UNINLINABLE (fn))
1106 return false;
1108 /* See if there is any language-specific reason it cannot be
1109 inlined. (It is important that this hook be called early because
1110 in C++ it may result in template instantiation.)
1111 If the function is not inlinable for language-specific reasons,
1112 it is left up to the langhook to explain why. */
1113 inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);
1115 /* If we don't have the function body available, we can't inline it.
1116 However, this should not be recorded since we also get here for
1117 forward declared inline functions. Therefore, return at once. */
1118 if (!DECL_SAVED_TREE (fn))
1119 return false;
1121 /* If we're not inlining at all, then we cannot inline this function. */
1122 else if (!flag_inline_trees)
1123 inlinable = false;
1125 /* Only try to inline functions if DECL_INLINE is set. This should be
1126 true for all functions declared `inline', and for all other functions
1127 as well with -finline-functions.
1129 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
1130 it's the front-end that must set DECL_INLINE in this case, because
1131 dwarf2out loses if a function that does not have DECL_INLINE set is
1132 inlined anyway. That is why we have both DECL_INLINE and
1133 DECL_DECLARED_INLINE_P. */
1134 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
1135 here should be redundant. */
1136 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
1137 inlinable = false;
1139 else if (inline_forbidden_p (fn))
1141 /* See if we should warn about uninlinable functions. Previously,
1142 some of these warnings would be issued while trying to expand
1143 the function inline, but that would cause multiple warnings
1144 about functions that would for example call alloca. But since
1145 this is a property of the function, just one warning is enough.
1146 As a bonus we can now give more details about the reason why a
1147 function is not inlinable.
1148 We only warn for functions declared `inline' by the user. */
1149 bool do_warning = (warn_inline
1150 && DECL_INLINE (fn)
1151 && DECL_DECLARED_INLINE_P (fn)
1152 && !DECL_IN_SYSTEM_HEADER (fn));
1154 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1155 sorry (inline_forbidden_reason, fn, fn);
1156 else if (do_warning)
1157 warning (inline_forbidden_reason, fn, fn);
1159 inlinable = false;
1162 /* Squirrel away the result so that we don't have to check again. */
1163 DECL_UNINLINABLE (fn) = !inlinable;
1165 return inlinable;
1168 /* Used by estimate_num_insns. Estimate number of instructions seen
1169 by a given statement. */
1171 static tree
1172 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
1174 int *count = data;
1175 tree x = *tp;
1177 if (IS_TYPE_OR_DECL_P (x))
1179 *walk_subtrees = 0;
1180 return NULL;
1182 /* Assume that constants and references count as nothing. These should
1183 be dominated by the operations among them, which we count later,
1184 and are a common target of CSE and similar optimizations. */
1185 else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
1186 return NULL;
1188 switch (TREE_CODE (x))
1190 /* Containers have no cost. */
1191 case TREE_LIST:
1192 case TREE_VEC:
1193 case BLOCK:
1194 case COMPONENT_REF:
1195 case BIT_FIELD_REF:
1196 case INDIRECT_REF:
1197 case ALIGN_INDIRECT_REF:
1198 case MISALIGNED_INDIRECT_REF:
1199 case ARRAY_REF:
1200 case ARRAY_RANGE_REF:
1201 case OBJ_TYPE_REF:
1202 case EXC_PTR_EXPR: /* ??? */
1203 case FILTER_EXPR: /* ??? */
1204 case COMPOUND_EXPR:
1205 case BIND_EXPR:
1206 case WITH_CLEANUP_EXPR:
1207 case NOP_EXPR:
1208 case VIEW_CONVERT_EXPR:
1209 case SAVE_EXPR:
1210 case ADDR_EXPR:
1211 case COMPLEX_EXPR:
1212 case RANGE_EXPR:
1213 case CASE_LABEL_EXPR:
1214 case SSA_NAME:
1215 case CATCH_EXPR:
1216 case EH_FILTER_EXPR:
1217 case STATEMENT_LIST:
1218 case ERROR_MARK:
1219 case NON_LVALUE_EXPR:
1220 case FDESC_EXPR:
1221 case VA_ARG_EXPR:
1222 case TRY_CATCH_EXPR:
1223 case TRY_FINALLY_EXPR:
1224 case LABEL_EXPR:
1225 case GOTO_EXPR:
1226 case RETURN_EXPR:
1227 case EXIT_EXPR:
1228 case LOOP_EXPR:
1229 case PHI_NODE:
1230 case WITH_SIZE_EXPR:
1231 break;
1233 /* We don't account constants for now. Assume that the cost is amortized
1234 by the operations that do use them. We may reconsider this decision once
1235 we are able to optimize the tree before estimating its size and break
1236 out static initializers. */
1237 case IDENTIFIER_NODE:
1238 case INTEGER_CST:
1239 case REAL_CST:
1240 case COMPLEX_CST:
1241 case VECTOR_CST:
1242 case STRING_CST:
1243 *walk_subtrees = 0;
1244 return NULL;
1246 /* Recognize assignments of large structures and constructors of
1247 big arrays. */
1248 case INIT_EXPR:
1249 case MODIFY_EXPR:
1250 x = TREE_OPERAND (x, 0);
1251 /* FALLTHRU */
1252 case TARGET_EXPR:
1253 case CONSTRUCTOR:
1255 HOST_WIDE_INT size;
1257 size = int_size_in_bytes (TREE_TYPE (x));
1259 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
1260 *count += 10;
1261 else
1262 *count += ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
1264 break;
1266 /* Assign cost of 1 to usual operations.
1267 ??? We may consider mapping RTL costs to this. */
1268 case COND_EXPR:
1270 case PLUS_EXPR:
1271 case MINUS_EXPR:
1272 case MULT_EXPR:
1274 case FIX_TRUNC_EXPR:
1275 case FIX_CEIL_EXPR:
1276 case FIX_FLOOR_EXPR:
1277 case FIX_ROUND_EXPR:
1279 case NEGATE_EXPR:
1280 case FLOAT_EXPR:
1281 case MIN_EXPR:
1282 case MAX_EXPR:
1283 case ABS_EXPR:
1285 case LSHIFT_EXPR:
1286 case RSHIFT_EXPR:
1287 case LROTATE_EXPR:
1288 case RROTATE_EXPR:
1290 case BIT_IOR_EXPR:
1291 case BIT_XOR_EXPR:
1292 case BIT_AND_EXPR:
1293 case BIT_NOT_EXPR:
1295 case TRUTH_ANDIF_EXPR:
1296 case TRUTH_ORIF_EXPR:
1297 case TRUTH_AND_EXPR:
1298 case TRUTH_OR_EXPR:
1299 case TRUTH_XOR_EXPR:
1300 case TRUTH_NOT_EXPR:
1302 case LT_EXPR:
1303 case LE_EXPR:
1304 case GT_EXPR:
1305 case GE_EXPR:
1306 case EQ_EXPR:
1307 case NE_EXPR:
1308 case ORDERED_EXPR:
1309 case UNORDERED_EXPR:
1311 case UNLT_EXPR:
1312 case UNLE_EXPR:
1313 case UNGT_EXPR:
1314 case UNGE_EXPR:
1315 case UNEQ_EXPR:
1316 case LTGT_EXPR:
1318 case CONVERT_EXPR:
1320 case CONJ_EXPR:
1322 case PREDECREMENT_EXPR:
1323 case PREINCREMENT_EXPR:
1324 case POSTDECREMENT_EXPR:
1325 case POSTINCREMENT_EXPR:
1327 case SWITCH_EXPR:
1329 case ASM_EXPR:
1331 case REALIGN_LOAD_EXPR:
1333 case RESX_EXPR:
1334 *count += 1;
1335 break;
1337 /* A few special cases of expensive operations. This is useful
1338 to avoid inlining functions having too many of these. */
1339 case TRUNC_DIV_EXPR:
1340 case CEIL_DIV_EXPR:
1341 case FLOOR_DIV_EXPR:
1342 case ROUND_DIV_EXPR:
1343 case EXACT_DIV_EXPR:
1344 case TRUNC_MOD_EXPR:
1345 case CEIL_MOD_EXPR:
1346 case FLOOR_MOD_EXPR:
1347 case ROUND_MOD_EXPR:
1348 case RDIV_EXPR:
1349 *count += 10;
1350 break;
1351 case CALL_EXPR:
1353 tree decl = get_callee_fndecl (x);
1355 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
1356 switch (DECL_FUNCTION_CODE (decl))
1358 case BUILT_IN_CONSTANT_P:
1359 *walk_subtrees = 0;
1360 return NULL_TREE;
1361 case BUILT_IN_EXPECT:
1362 return NULL_TREE;
1363 default:
1364 break;
1366 *count += 10;
1367 break;
1369 default:
1370 /* Abort here so we know we don't miss any nodes. */
1371 gcc_unreachable ();
1373 return NULL;
1376 /* Estimate number of instructions that will be created by expanding EXPR. */
1379 estimate_num_insns (tree expr)
1381 int num = 0;
1382 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);
1383 return num;
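/* A rough reading of the weights above: most arithmetic, comparisons and
   conversions cost 1, divisions, modulus operations and calls cost 10, and
   aggregate copies are charged according to their size, so the result is
   only a coarse proxy for the eventual instruction count.  */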
1386 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
1388 static tree
1389 expand_call_inline (tree *tp, int *walk_subtrees, void *data)
1391 inline_data *id;
1392 tree t;
1393 tree expr;
1394 tree stmt;
1395 tree use_retvar;
1396 tree decl;
1397 tree fn;
1398 tree arg_inits;
1399 tree *inlined_body;
1400 splay_tree st;
1401 tree args;
1402 tree return_slot_addr;
1403 tree modify_dest;
1404 location_t saved_location;
1405 struct cgraph_edge *edge;
1406 const char *reason;
1408 /* See what we've got. */
1409 id = (inline_data *) data;
1410 t = *tp;
1412 /* Set input_location here so we get the right instantiation context
1413 if we call instantiate_decl from inlinable_function_p. */
1414 saved_location = input_location;
1415 if (EXPR_HAS_LOCATION (t))
1416 input_location = EXPR_LOCATION (t);
1418 /* Recurse, but letting recursive invocations know that we are
1419 inside the body of a TARGET_EXPR. */
1420 if (TREE_CODE (*tp) == TARGET_EXPR)
1422 #if 0
1423 int i, len = TREE_CODE_LENGTH (TARGET_EXPR);
1425 /* We're walking our own subtrees. */
1426 *walk_subtrees = 0;
1428 /* Actually walk over them. This loop is the body of
1429 walk_trees, omitting the case where the TARGET_EXPR
1430 itself is handled. */
1431 for (i = 0; i < len; ++i)
1433 if (i == 2)
1434 ++id->in_target_cleanup_p;
1435 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
1436 id->tree_pruner);
1437 if (i == 2)
1438 --id->in_target_cleanup_p;
1441 goto egress;
1442 #endif
1445 if (TYPE_P (t))
1446 /* Because types were not copied in copy_body, CALL_EXPRs beneath
1447 them should not be expanded. This can happen if the type is a
1448 dynamic array type, for example. */
1449 *walk_subtrees = 0;
1451 /* From here on, we're only interested in CALL_EXPRs. */
1452 if (TREE_CODE (t) != CALL_EXPR)
1453 goto egress;
1455 /* First, see if we can figure out what function is being called.
1456 If we cannot, then there is no hope of inlining the function. */
1457 fn = get_callee_fndecl (t);
1458 if (!fn)
1459 goto egress;
1461 /* Turn forward declarations into real ones. */
1462 fn = cgraph_node (fn)->decl;
1464 /* If fn is a declaration of a function in a nested scope that was
1465 globally declared inline, we don't set its DECL_INITIAL.
1466 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
1467 C++ front-end uses it for cdtors to refer to their internal
1468 declarations, that are not real functions. Fortunately those
1469 don't have trees to be saved, so we can tell by checking their
1470 DECL_SAVED_TREE. */
1471 if (! DECL_INITIAL (fn)
1472 && DECL_ABSTRACT_ORIGIN (fn)
1473 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
1474 fn = DECL_ABSTRACT_ORIGIN (fn);
1476 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
1477 Kill this check once this is fixed. */
1478 if (!id->current_node->analyzed)
1479 goto egress;
1481 edge = cgraph_edge (id->current_node, t);
1483 /* Constant propagation on arguments done during previous inlining
1484 may create a new direct call. Produce an edge for it. */
1485 if (!edge)
1487 struct cgraph_node *dest = cgraph_node (fn);
1489 /* We have a missing edge in the callgraph. This can happen in one case
1490 where previous inlining turned an indirect call into a direct call by
1491 constant propagating arguments. In all other cases we hit a bug
1492 (incorrect node sharing is the most common reason for missing edges). */
1493 gcc_assert (dest->needed || !flag_unit_at_a_time);
1494 cgraph_create_edge (id->node, dest, t)->inline_failed
1495 = N_("originally indirect function call not considered for inlining");
1496 goto egress;
1499 /* Don't try to inline functions that are not well-suited to
1500 inlining. */
1501 if (!cgraph_inline_p (edge, &reason))
1503 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1505 sorry ("%Jinlining failed in call to %qF: %s", fn, fn, reason);
1506 sorry ("called from here");
1508 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
1509 && !DECL_IN_SYSTEM_HEADER (fn)
1510 && strlen (reason)
1511 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn)))
1513 warning ("%Jinlining failed in call to %qF: %s", fn, fn, reason);
1514 warning ("called from here");
1516 goto egress;
1519 #ifdef ENABLE_CHECKING
1520 if (edge->callee->decl != id->node->decl)
1521 verify_cgraph_node (edge->callee);
1522 #endif
1524 if (! lang_hooks.tree_inlining.start_inlining (fn))
1525 goto egress;
1527 /* Build a block containing code to initialize the arguments, the
1528 actual inline expansion of the body, and a label for the return
1529 statements within the function to jump to. The type of the
1530 statement expression is the return type of the function call. */
1531 stmt = NULL;
1532 expr = build (BIND_EXPR, void_type_node, NULL_TREE,
1533 stmt, make_node (BLOCK));
1534 BLOCK_ABSTRACT_ORIGIN (BIND_EXPR_BLOCK (expr)) = fn;
1536 /* Local declarations will be replaced by their equivalents in this
1537 map. */
1538 st = id->decl_map;
1539 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
1540 NULL, NULL);
1542 /* Initialize the parameters. */
1543 args = TREE_OPERAND (t, 1);
1544 return_slot_addr = NULL_TREE;
1545 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
1547 return_slot_addr = TREE_VALUE (args);
1548 args = TREE_CHAIN (args);
1549 TREE_TYPE (expr) = void_type_node;
1552 arg_inits = initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2),
1553 fn, expr);
1554 if (arg_inits)
1556 /* Expand any inlined calls in the initializers. Do this before we
1557 push FN on the stack of functions we are inlining; we want to
1558 inline calls to FN that appear in the initializers for the
1559 parameters.
1561 Note we need to save and restore the saved tree statement iterator
1562 to avoid having it clobbered by expand_calls_inline. */
1563 tree_stmt_iterator save_tsi;
1565 save_tsi = id->tsi;
1566 expand_calls_inline (&arg_inits, id);
1567 id->tsi = save_tsi;
1569 /* And add them to the tree. */
1570 append_to_statement_list (arg_inits, &BIND_EXPR_BODY (expr));
1573 /* Record the function we are about to inline so that we can avoid
1574 recursing into it. */
1575 VARRAY_PUSH_TREE (id->fns, fn);
1577 /* Return statements in the function body will be replaced by jumps
1578 to the RET_LABEL. */
1579 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
1580 DECL_ARTIFICIAL (id->ret_label) = 1;
1581 DECL_IGNORED_P (id->ret_label) = 1;
1582 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
1583 insert_decl_map (id, id->ret_label, id->ret_label);
1585 gcc_assert (DECL_INITIAL (fn));
1586 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
1588 /* Find the lhs to which the result of this call is assigned. */
1589 modify_dest = tsi_stmt (id->tsi);
1590 if (TREE_CODE (modify_dest) == MODIFY_EXPR)
1592 modify_dest = TREE_OPERAND (modify_dest, 0);
1594 /* The function which we are inlining might not return a value,
1595 in which case we should issue a warning that the function
1596 does not return a value. In that case the optimizers will
1597 see that the variable to which the value is assigned was not
1598 initialized. We do not want to issue a warning about that
1599 uninitialized variable. */
1600 if (DECL_P (modify_dest))
1601 TREE_NO_WARNING (modify_dest) = 1;
1603 else
1604 modify_dest = NULL;
1606 /* Declare the return variable for the function. */
1607 decl = declare_return_variable (id, return_slot_addr,
1608 modify_dest, &use_retvar);
1610 /* After we've initialized the parameters, we insert the body of the
1611 function itself. */
1613 struct cgraph_node *old_node = id->current_node;
1614 tree copy;
1616 id->current_node = edge->callee;
1617 copy = copy_body (id);
1619 /* If the function uses a return slot, then it may legitimately
1620 fall through while still returning a value, so we have to skip
1621 the warning here. */
1622 if (warn_return_type
1623 && !TREE_NO_WARNING (fn)
1624 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
1625 && return_slot_addr == NULL_TREE
1626 && block_may_fallthru (copy))
1628 warning ("control may reach end of non-void function %qD being inlined",
1629 fn);
1630 TREE_NO_WARNING (fn) = 1;
1633 append_to_statement_list (copy, &BIND_EXPR_BODY (expr));
1634 id->current_node = old_node;
1636 inlined_body = &BIND_EXPR_BODY (expr);
1638 /* After the body of the function comes the RET_LABEL. This must come
1639 before we evaluate the returned value below, because that evaluation
1640 may cause RTL to be generated. */
1641 if (TREE_USED (id->ret_label))
1643 tree label = build1 (LABEL_EXPR, void_type_node, id->ret_label);
1644 append_to_statement_list (label, &BIND_EXPR_BODY (expr));
1647 /* Clean up. */
1648 splay_tree_delete (id->decl_map);
1649 id->decl_map = st;
1651 /* Although, from the semantic viewpoint, the new expression has
1652 side-effects only if the old one did, it is not possible, from
1653 the technical viewpoint, to evaluate the body of a function
1654 multiple times without serious havoc. */
1655 TREE_SIDE_EFFECTS (expr) = 1;
1657 tsi_link_before (&id->tsi, expr, TSI_SAME_STMT);
1659 /* If the inlined function returns a result that we care about,
1660 then we're going to need to splice in a MODIFY_EXPR. Otherwise
1661 the call was a standalone statement and we can just replace it
1662 with the BIND_EXPR inline representation of the called function. */
1663 if (!use_retvar || !modify_dest)
1664 *tsi_stmt_ptr (id->tsi) = build_empty_stmt ();
1665 else
1666 *tp = use_retvar;
1668 /* When we gimplify a function call, we may clear TREE_SIDE_EFFECTS on
1669 the call if it is to a "const" function. Thus the copy of
1670 TREE_SIDE_EFFECTS from the CALL_EXPR to the BIND_EXPR above will
1671 result in TREE_SIDE_EFFECTS not being set for the inlined copy of a
1672 "const" function.
1674 Unfortunately, that is wrong as inlining the function can create/expose
1675 interesting side effects (such as setting of a return value).
1677 The easiest solution is to simply recalculate TREE_SIDE_EFFECTS for
1678 the toplevel expression. */
1679 recalculate_side_effects (expr);
1681 /* Output the inlining info for this abstract function, since it has been
1682 inlined. If we don't do this now, we can lose the information about the
1683 variables in the function when the blocks get blown away as soon as we
1684 remove the cgraph node. */
1685 (*debug_hooks->outlining_inline_function) (edge->callee->decl);
1687 /* Update callgraph if needed. */
1688 cgraph_remove_node (edge->callee);
1690 /* Recurse into the body of the just inlined function. */
1691 expand_calls_inline (inlined_body, id);
1692 VARRAY_POP (id->fns);
1694 /* Don't walk into subtrees. We've already handled them above. */
1695 *walk_subtrees = 0;
1697 lang_hooks.tree_inlining.end_inlining (fn);
1699 /* Keep iterating. */
1700 egress:
1701 input_location = saved_location;
1702 return NULL_TREE;
1705 static void
1706 expand_calls_inline (tree *stmt_p, inline_data *id)
1708 tree stmt = *stmt_p;
1709 enum tree_code code = TREE_CODE (stmt);
1710 int dummy;
1712 switch (code)
1714 case STATEMENT_LIST:
1716 tree_stmt_iterator i;
1717 tree new;
1719 for (i = tsi_start (stmt); !tsi_end_p (i); )
1721 id->tsi = i;
1722 expand_calls_inline (tsi_stmt_ptr (i), id);
1724 new = tsi_stmt (i);
1725 if (TREE_CODE (new) == STATEMENT_LIST)
1727 tsi_link_before (&i, new, TSI_SAME_STMT);
1728 tsi_delink (&i);
1730 else
1731 tsi_next (&i);
1734 break;
1736 case COND_EXPR:
1737 expand_calls_inline (&COND_EXPR_THEN (stmt), id);
1738 expand_calls_inline (&COND_EXPR_ELSE (stmt), id);
1739 break;
1741 case CATCH_EXPR:
1742 expand_calls_inline (&CATCH_BODY (stmt), id);
1743 break;
1745 case EH_FILTER_EXPR:
1746 expand_calls_inline (&EH_FILTER_FAILURE (stmt), id);
1747 break;
1749 case TRY_CATCH_EXPR:
1750 case TRY_FINALLY_EXPR:
1751 expand_calls_inline (&TREE_OPERAND (stmt, 0), id);
1752 expand_calls_inline (&TREE_OPERAND (stmt, 1), id);
1753 break;
1755 case BIND_EXPR:
1756 expand_calls_inline (&BIND_EXPR_BODY (stmt), id);
1757 break;
1759 case COMPOUND_EXPR:
1760 /* We're gimple. We should have gotten rid of all these. */
1761 gcc_unreachable ();
1763 case RETURN_EXPR:
1764 stmt_p = &TREE_OPERAND (stmt, 0);
1765 stmt = *stmt_p;
1766 if (!stmt || TREE_CODE (stmt) != MODIFY_EXPR)
1767 break;
1769 /* FALLTHRU */
1771 case MODIFY_EXPR:
1772 stmt_p = &TREE_OPERAND (stmt, 1);
1773 stmt = *stmt_p;
1774 if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
1776 stmt_p = &TREE_OPERAND (stmt, 0);
1777 stmt = *stmt_p;
1779 if (TREE_CODE (stmt) != CALL_EXPR)
1780 break;
1782 /* FALLTHRU */
1784 case CALL_EXPR:
1785 expand_call_inline (stmt_p, &dummy, id);
1786 break;
1788 default:
1789 break;
1793 /* Expand calls to inline functions in the body of FN. */
1795 void
1796 optimize_inline_calls (tree fn)
1798 inline_data id;
1799 tree prev_fn;
1801 /* There is no point in performing inlining if errors have already
1802 occurred -- and we might crash if we try to inline invalid
1803 code. */
1804 if (errorcount || sorrycount)
1805 return;
1807 /* Clear out ID. */
1808 memset (&id, 0, sizeof (id));
1810 id.current_node = id.node = cgraph_node (fn);
1811 /* Don't allow recursion into FN. */
1812 VARRAY_TREE_INIT (id.fns, 32, "fns");
1813 VARRAY_PUSH_TREE (id.fns, fn);
1814 /* Or any functions that aren't finished yet. */
1815 prev_fn = NULL_TREE;
1816 if (current_function_decl)
1818 VARRAY_PUSH_TREE (id.fns, current_function_decl);
1819 prev_fn = current_function_decl;
1822 prev_fn = lang_hooks.tree_inlining.add_pending_fn_decls (&id.fns, prev_fn);
1824 /* Keep track of the low-water mark, i.e., the point where the first
1825 real inlining is represented in ID.FNS. */
1826 id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);
1828 /* Replace all calls to inline functions with the bodies of those
1829 functions. */
1830 id.tree_pruner = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
1831 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
1833 /* Clean up. */
1834 htab_delete (id.tree_pruner);
1836 #ifdef ENABLE_CHECKING
1838 struct cgraph_edge *e;
1840 verify_cgraph_node (id.node);
1842 /* Double check that we inlined everything we are supposed to inline. */
1843 for (e = id.node->callees; e; e = e->next_callee)
1844 gcc_assert (e->inline_failed);
1846 #endif
1849 /* FN is a function that has a complete body, and CLONE is a function whose
1850 body is to be set to a copy of FN, mapping argument declarations according
1851 to the ARG_MAP splay_tree. */
1853 void
1854 clone_body (tree clone, tree fn, void *arg_map)
1856 inline_data id;
1858 /* Clone the body, as if we were making an inline call. But, remap the
1859 parameters in the callee to the parameters of caller. If there's an
1860 in-charge parameter, map it to an appropriate constant. */
1861 memset (&id, 0, sizeof (id));
1862 VARRAY_TREE_INIT (id.fns, 2, "fns");
1863 VARRAY_PUSH_TREE (id.fns, clone);
1864 VARRAY_PUSH_TREE (id.fns, fn);
1865 id.decl_map = (splay_tree)arg_map;
1867 /* Cloning is treated slightly differently from inlining. Set
1868 CLONING_P so that it's clear which operation we're performing. */
1869 id.cloning_p = true;
1871 /* Actually copy the body. */
1872 append_to_statement_list_force (copy_body (&id), &DECL_SAVED_TREE (clone));
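
/* A minimal sketch of how a front end might use clone_body: build the
   ARG_MAP splay_tree by pairing each parameter of FN with the matching
   parameter of CLONE, then copy the body.  clone_body_sketch is
   hypothetical; it assumes CLONE already has a DECL_ARGUMENTS chain of
   the same length as FN's.  */

static void
clone_body_sketch (tree clone, tree fn)
{
  splay_tree arg_map
    = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
  tree parm, clone_parm;

  /* Map each of FN's parameters to the corresponding one of CLONE.  */
  for (parm = DECL_ARGUMENTS (fn), clone_parm = DECL_ARGUMENTS (clone);
       parm && clone_parm;
       parm = TREE_CHAIN (parm), clone_parm = TREE_CHAIN (clone_parm))
    splay_tree_insert (arg_map, (splay_tree_key) parm,
                       (splay_tree_value) clone_parm);

  clone_body (clone, fn, arg_map);
  splay_tree_delete (arg_map);
}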
1875 /* Make and return a duplicate of the body of FN. Put copies of FN's
1876 DECL_ARGUMENTS in *ARG_COPY and of its static chain, if any, in *SC_COPY. */
1878 tree
1879 save_body (tree fn, tree *arg_copy, tree *sc_copy)
1881 inline_data id;
1882 tree body, *parg;
1884 memset (&id, 0, sizeof (id));
1885 VARRAY_TREE_INIT (id.fns, 1, "fns");
1886 VARRAY_PUSH_TREE (id.fns, fn);
1887 id.node = cgraph_node (fn);
1888 id.saving_p = true;
1889 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
1890 *arg_copy = DECL_ARGUMENTS (fn);
1892 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
1894 tree new = copy_node (*parg);
1896 lang_hooks.dup_lang_specific_decl (new);
1897 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
1898 insert_decl_map (&id, *parg, new);
1899 TREE_CHAIN (new) = TREE_CHAIN (*parg);
1900 *parg = new;
1903 *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
1904 if (*sc_copy)
1906 tree new = copy_node (*sc_copy);
1908 lang_hooks.dup_lang_specific_decl (new);
1909 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
1910 insert_decl_map (&id, *sc_copy, new);
1911 TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
1912 *sc_copy = new;
1915 insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));
1917 /* Actually copy the body. */
1918 body = copy_body (&id);
1920 /* Clean up. */
1921 splay_tree_delete (id.decl_map);
1922 return body;
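
/* A minimal sketch of a save_body caller: make a pristine copy of FN's
   body before later passes modify it, keeping the copied argument chain
   and static chain declaration alongside it.  The structure and function
   below are hypothetical; a real caller would keep this in its own
   per-function data.  */

struct saved_function_body_sketch
{
  tree body;
  tree args;
  tree static_chain;
};

static void
save_function_body_sketch (tree fn, struct saved_function_body_sketch *save)
{
  /* SAVE_BODY fills in the argument and static-chain copies as it goes.  */
  save->body = save_body (fn, &save->args, &save->static_chain);
}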
1925 #define WALK_SUBTREE(NODE) \
1926 do \
1928 result = walk_tree (&(NODE), func, data, pset); \
1929 if (result) \
1930 return result; \
1932 while (0)
1934 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
1935 to be walked whenever a type is seen in the tree. The rest of the
1936 operands and the return value are as for walk_tree. */
1938 static tree
1939 walk_type_fields (tree type, walk_tree_fn func, void *data,
1940 struct pointer_set_t *pset)
1942 tree result = NULL_TREE;
1944 switch (TREE_CODE (type))
1946 case POINTER_TYPE:
1947 case REFERENCE_TYPE:
1948 /* We have to worry about mutually recursive pointers. These can't
1949 be written in C. They can in Ada. It's pathological, but
1950 there's an ACATS test (c38102a) that checks it. Deal with this
1951 by checking whether the pointed-to type is itself a pointer, whose
1952 target is again a pointer, whose target is again a pointer, and we
1953 have no PSET yet. If so, walk the pointed-to type with a
1954 duplicate-avoiding walk. We only check three levels deep to avoid
1955 the cost of the pointer set when we don't need one. */
1955 if (POINTER_TYPE_P (TREE_TYPE (type))
1956 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
1957 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
1958 && !pset)
1960 result = walk_tree_without_duplicates (&TREE_TYPE (type),
1961 func, data);
1962 if (result)
1963 return result;
1965 break;
1968 /* ... fall through ... */
1970 case COMPLEX_TYPE:
1971 WALK_SUBTREE (TREE_TYPE (type));
1972 break;
1974 case METHOD_TYPE:
1975 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
1977 /* Fall through. */
1979 case FUNCTION_TYPE:
1980 WALK_SUBTREE (TREE_TYPE (type));
1982 tree arg;
1984 /* We never want to walk into default arguments. */
1985 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
1986 WALK_SUBTREE (TREE_VALUE (arg));
1988 break;
1990 case ARRAY_TYPE:
1991 /* Don't follow this node's type if it is a pointer, for fear that
1992 we'll have infinite recursion. Those types are uninteresting anyway. */
1993 if (!POINTER_TYPE_P (TREE_TYPE (type))
1994 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE)
1995 WALK_SUBTREE (TREE_TYPE (type));
1996 WALK_SUBTREE (TYPE_DOMAIN (type));
1997 break;
1999 case BOOLEAN_TYPE:
2000 case ENUMERAL_TYPE:
2001 case INTEGER_TYPE:
2002 case CHAR_TYPE:
2003 case REAL_TYPE:
2004 WALK_SUBTREE (TYPE_MIN_VALUE (type));
2005 WALK_SUBTREE (TYPE_MAX_VALUE (type));
2006 break;
2008 case OFFSET_TYPE:
2009 WALK_SUBTREE (TREE_TYPE (type));
2010 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
2011 break;
2013 default:
2014 break;
2017 return NULL_TREE;
2020 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
2021 called with the DATA and the address of each sub-tree. If FUNC returns a
2022 non-NULL value, the traversal is aborted, and the value returned by FUNC
2023 is returned. If PSET is non-NULL it is used to record the nodes visited,
2024 and to avoid visiting a node more than once. */
2026 tree
2027 walk_tree (tree *tp, walk_tree_fn func, void *data, struct pointer_set_t *pset)
2029 enum tree_code code;
2030 int walk_subtrees;
2031 tree result;
2033 #define WALK_SUBTREE_TAIL(NODE) \
2034 do \
2036 tp = & (NODE); \
2037 goto tail_recurse; \
2039 while (0)
2041 tail_recurse:
2042 /* Skip empty subtrees. */
2043 if (!*tp)
2044 return NULL_TREE;
2046 /* Don't walk the same tree twice, if the user has requested
2047 that we avoid doing so. */
2048 if (pset && pointer_set_insert (pset, *tp))
2049 return NULL_TREE;
2051 /* Call the function. */
2052 walk_subtrees = 1;
2053 result = (*func) (tp, &walk_subtrees, data);
2055 /* If we found something, return it. */
2056 if (result)
2057 return result;
2059 code = TREE_CODE (*tp);
2061 /* Even if we didn't, FUNC may have decided that there was nothing
2062 interesting below this point in the tree. */
2063 if (!walk_subtrees)
2065 if (code == TREE_LIST)
2066 /* But we still need to check our siblings. */
2067 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
2068 else
2069 return NULL_TREE;
2072 result = lang_hooks.tree_inlining.walk_subtrees (tp, &walk_subtrees, func,
2073 data, pset);
2074 if (result || ! walk_subtrees)
2075 return result;
2077 /* If this is a DECL_EXPR, walk into various fields of the type that it's
2078 defining. We only want to walk into these fields of a type in this
2079 case. Note that decls get walked as part of the processing of a
2080 BIND_EXPR.
2082 ??? Precisely which fields of types we are supposed to walk in
2083 this case vs. the normal case is not well defined. */
2084 if (code == DECL_EXPR
2085 && TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL
2086 && TREE_CODE (TREE_TYPE (DECL_EXPR_DECL (*tp))) != ERROR_MARK)
2088 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
2090 /* Call the function for the type. See if it returns anything or
2091 doesn't want us to continue. If we are to continue, walk both
2092 the normal fields and those for the declaration case. */
2093 result = (*func) (type_p, &walk_subtrees, data);
2094 if (result || !walk_subtrees)
2095 return NULL_TREE;
2097 result = walk_type_fields (*type_p, func, data, pset);
2098 if (result)
2099 return result;
2101 WALK_SUBTREE (TYPE_SIZE (*type_p));
2102 WALK_SUBTREE (TYPE_SIZE_UNIT (*type_p));
2104 /* If this is a record type, also walk the fields. */
2105 if (TREE_CODE (*type_p) == RECORD_TYPE
2106 || TREE_CODE (*type_p) == UNION_TYPE
2107 || TREE_CODE (*type_p) == QUAL_UNION_TYPE)
2109 tree field;
2111 for (field = TYPE_FIELDS (*type_p); field;
2112 field = TREE_CHAIN (field))
2114 /* We'd like to look at the type of the field, but we can easily
2115 get infinite recursion. So assume it's pointed to elsewhere
2116 in the tree. Also, ignore things that aren't fields. */
2117 if (TREE_CODE (field) != FIELD_DECL)
2118 continue;
2120 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
2121 WALK_SUBTREE (DECL_SIZE (field));
2122 WALK_SUBTREE (DECL_SIZE_UNIT (field));
2123 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
2124 WALK_SUBTREE (DECL_QUALIFIER (field));
2129 else if (code != SAVE_EXPR
2130 && code != BIND_EXPR
2131 && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2133 int i, len;
2135 /* Walk over all the sub-trees of this operand. */
2136 len = TREE_CODE_LENGTH (code);
2137 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same,
2138 but we only want to walk them once. */
2139 if (code == TARGET_EXPR
2140 && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
2141 --len;
2143 /* Go through the subtrees. We need to do this in forward order so
2144 that the scope of a FOR_EXPR is handled properly. */
2145 #ifdef DEBUG_WALK_TREE
2146 for (i = 0; i < len; ++i)
2147 WALK_SUBTREE (TREE_OPERAND (*tp, i));
2148 #else
2149 for (i = 0; i < len - 1; ++i)
2150 WALK_SUBTREE (TREE_OPERAND (*tp, i));
2152 if (len)
2154 /* The common case is that we may tail recurse here. */
2155 if (code != BIND_EXPR
2156 && !TREE_CHAIN (*tp))
2157 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
2158 else
2159 WALK_SUBTREE (TREE_OPERAND (*tp, len - 1));
2161 #endif
2164 /* If this is a type, walk the needed fields in the type. */
2165 else if (TYPE_P (*tp))
2167 result = walk_type_fields (*tp, func, data, pset);
2168 if (result)
2169 return result;
2171 else
2173 /* Not one of the easy cases. We must explicitly go through the
2174 children. */
2175 switch (code)
2177 case ERROR_MARK:
2178 case IDENTIFIER_NODE:
2179 case INTEGER_CST:
2180 case REAL_CST:
2181 case VECTOR_CST:
2182 case STRING_CST:
2183 case BLOCK:
2184 case PLACEHOLDER_EXPR:
2185 case SSA_NAME:
2186 case FIELD_DECL:
2187 case RESULT_DECL:
2188 /* None of these have subtrees other than those already walked
2189 above. */
2190 break;
2192 case TREE_LIST:
2193 WALK_SUBTREE (TREE_VALUE (*tp));
2194 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
2195 break;
2197 case TREE_VEC:
2199 int len = TREE_VEC_LENGTH (*tp);
2201 if (len == 0)
2202 break;
2204 /* Walk all elements but the first. */
2205 while (--len)
2206 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
2208 /* Now walk the first one as a tail call. */
2209 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
2212 case COMPLEX_CST:
2213 WALK_SUBTREE (TREE_REALPART (*tp));
2214 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
2216 case CONSTRUCTOR:
2217 WALK_SUBTREE_TAIL (CONSTRUCTOR_ELTS (*tp));
2219 case SAVE_EXPR:
2220 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
2222 case BIND_EXPR:
2224 tree decl;
2225 for (decl = BIND_EXPR_VARS (*tp); decl; decl = TREE_CHAIN (decl))
2227 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
2228 into declarations that are just mentioned, rather than
2229 declared; they don't really belong to this part of the tree.
2230 And, we can see cycles: the initializer for a declaration
2231 can refer to the declaration itself. */
2232 WALK_SUBTREE (DECL_INITIAL (decl));
2233 WALK_SUBTREE (DECL_SIZE (decl));
2234 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
2236 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
2239 case STATEMENT_LIST:
2241 tree_stmt_iterator i;
2242 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
2243 WALK_SUBTREE (*tsi_stmt_ptr (i));
2245 break;
2247 default:
2248 /* ??? This could be a language-defined node. We really should make
2249 a hook for it, but right now just ignore it. */
2250 break;
2254 /* We didn't find what we were looking for. */
2255 return NULL_TREE;
2257 #undef WALK_SUBTREE
2258 #undef WALK_SUBTREE_TAIL
2261 /* Like walk_tree, but does not walk duplicate nodes more than once. */
2263 tree
2264 walk_tree_without_duplicates (tree *tp, walk_tree_fn func, void *data)
2266 tree result;
2267 struct pointer_set_t *pset;
2269 pset = pointer_set_create ();
2270 result = walk_tree (tp, func, data, pset);
2271 pointer_set_destroy (pset);
2272 return result;
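
/* A minimal sketch of a walk_tree_fn callback: count the CALL_EXPR nodes
   reachable from BODY, visiting shared subtrees only once.
   count_calls_r_sketch and count_calls_sketch are hypothetical
   illustrations of the callback protocol described above.  */

static tree
count_calls_r_sketch (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                      void *data)
{
  if (TREE_CODE (*tp) == CALL_EXPR)
    ++*(int *) data;

  /* Returning NULL_TREE keeps the traversal going.  */
  return NULL_TREE;
}

static int
count_calls_sketch (tree body)
{
  int count = 0;
  walk_tree_without_duplicates (&body, count_calls_r_sketch, &count);
  return count;
}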
2275 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2277 tree
2278 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2280 enum tree_code code = TREE_CODE (*tp);
2282 /* We make copies of most nodes. */
2283 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
2284 || code == TREE_LIST
2285 || code == TREE_VEC
2286 || code == TYPE_DECL)
2288 /* Because the chain gets clobbered when we make a copy, we save it
2289 here. */
2290 tree chain = TREE_CHAIN (*tp);
2291 tree new;
2293 /* Copy the node. */
2294 new = copy_node (*tp);
2296 /* Propagate mudflap marked-ness. */
2297 if (flag_mudflap && mf_marked_p (*tp))
2298 mf_mark (new);
2300 *tp = new;
2302 /* Now, restore the chain, if appropriate. That will cause
2303 walk_tree to walk into the chain as well. */
2304 if (code == PARM_DECL || code == TREE_LIST)
2305 TREE_CHAIN (*tp) = chain;
2307 /* For now, we don't update BLOCKs when we make copies. So, we
2308 have to nullify all BIND_EXPRs. */
2309 if (TREE_CODE (*tp) == BIND_EXPR)
2310 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2313 else if (TREE_CODE_CLASS (code) == tcc_type)
2314 *walk_subtrees = 0;
2315 else if (TREE_CODE_CLASS (code) == tcc_declaration)
2316 *walk_subtrees = 0;
2317 else if (TREE_CODE_CLASS (code) == tcc_constant)
2318 *walk_subtrees = 0;
2319 else
2320 gcc_assert (code != STATEMENT_LIST);
2321 return NULL_TREE;
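
/* A minimal sketch of copy_tree_r in use: walking an expression with it
   replaces every expression node with a fresh copy, yielding an unshared
   duplicate (types, constants and most declarations are left shared, as
   copy_tree_r does not copy them).  deep_copy_sketch is hypothetical.  */

static tree
deep_copy_sketch (tree expr)
{
  walk_tree (&expr, copy_tree_r, NULL, NULL);
  return expr;
}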
2324 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2325 information indicating to what new SAVE_EXPR this one should be mapped,
2326 use that one. Otherwise, create a new node and enter it in ST. */
2328 static void
2329 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2331 splay_tree st = (splay_tree) st_;
2332 splay_tree_node n;
2333 tree t;
2335 /* See if we already encountered this SAVE_EXPR. */
2336 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2338 /* If we didn't already remap this SAVE_EXPR, do so now. */
2339 if (!n)
2341 t = copy_node (*tp);
2343 /* Remember this SAVE_EXPR. */
2344 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2345 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2346 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2348 else
2350 /* We've already walked into this SAVE_EXPR; don't do it again. */
2351 *walk_subtrees = 0;
2352 t = (tree) n->value;
2355 /* Replace this SAVE_EXPR with the copy. */
2356 *tp = t;
2359 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
2360 copies the label's declaration and enters it in the decl_map splay_tree
2361 of DATA (which is really an `inline_data *'). */
2363 static tree
2364 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
2365 void *data)
2367 inline_data *id = (inline_data *) data;
2369 /* Don't walk into types. */
2370 if (TYPE_P (*tp))
2371 *walk_subtrees = 0;
2373 else if (TREE_CODE (*tp) == LABEL_EXPR)
2375 tree decl = TREE_OPERAND (*tp, 0);
2377 /* Copy the decl and remember the copy. */
2378 insert_decl_map (id, decl,
2379 copy_decl_for_inlining (decl, DECL_CONTEXT (decl),
2380 DECL_CONTEXT (decl)));
2383 return NULL_TREE;
2386 /* Perform any modifications to EXPR required when it is unsaved. Does
2387 not recurse into EXPR's subtrees. */
2389 static void
2390 unsave_expr_1 (tree expr)
2392 switch (TREE_CODE (expr))
2394 case TARGET_EXPR:
2395 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
2396 It's OK for this to happen if it was part of a subtree that
2397 isn't immediately expanded, such as operand 2 of another
2398 TARGET_EXPR. */
2399 if (TREE_OPERAND (expr, 1))
2400 break;
2402 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
2403 TREE_OPERAND (expr, 3) = NULL_TREE;
2404 break;
2406 default:
2407 break;
2411 /* Called via walk_tree when an expression is unsaved. Using the
2412 decl_map splay_tree from DATA (which is really an `inline_data *'),
2413 remaps all local declarations to appropriate replacements. */
2415 static tree
2416 unsave_r (tree *tp, int *walk_subtrees, void *data)
2418 inline_data *id = (inline_data *) data;
2419 splay_tree st = id->decl_map;
2420 splay_tree_node n;
2422 /* Only a local declaration (variable or label). */
2423 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
2424 || TREE_CODE (*tp) == LABEL_DECL)
2426 /* Lookup the declaration. */
2427 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2429 /* If it's there, remap it. */
2430 if (n)
2431 *tp = (tree) n->value;
2434 else if (TREE_CODE (*tp) == STATEMENT_LIST)
2435 copy_statement_list (tp);
2436 else if (TREE_CODE (*tp) == BIND_EXPR)
2437 copy_bind_expr (tp, walk_subtrees, id);
2438 else if (TREE_CODE (*tp) == SAVE_EXPR)
2439 remap_save_expr (tp, st, walk_subtrees);
2440 else
2442 copy_tree_r (tp, walk_subtrees, NULL);
2444 /* Do whatever unsaving is required. */
2445 unsave_expr_1 (*tp);
2448 /* Keep iterating. */
2449 return NULL_TREE;
2452 /* Copies everything in EXPR and replaces variables, labels
2453 and SAVE_EXPRs local to EXPR. */
2455 tree
2456 unsave_expr_now (tree expr)
2458 inline_data id;
2460 /* There's nothing to do for NULL_TREE. */
2461 if (expr == 0)
2462 return expr;
2464 /* Set up ID. */
2465 memset (&id, 0, sizeof (id));
2466 VARRAY_TREE_INIT (id.fns, 1, "fns");
2467 VARRAY_PUSH_TREE (id.fns, current_function_decl);
2468 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2470 /* Walk the tree once to find local labels. */
2471 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
2473 /* Walk the tree again, copying, remapping, and unsaving. */
2474 walk_tree (&expr, unsave_r, &id, NULL);
2476 /* Clean up. */
2477 splay_tree_delete (id.decl_map);
2479 return expr;
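
/* A minimal sketch of unsave_expr_now in use: make an independent copy of
   an expression so it can be expanded in two places.  The original EXPR
   is left untouched; the copy gets its own local labels, bound variables
   and SAVE_EXPRs.  duplicate_expr_sketch is hypothetical.  */

static tree
duplicate_expr_sketch (tree expr)
{
  return unsave_expr_now (expr);
}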
2482 /* Allow someone to determine from gdb whether SEARCH occurs anywhere within TOP. */
2484 static tree
2485 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
2487 if (*tp == data)
2488 return (tree) data;
2489 else
2490 return NULL;
2493 bool
2494 debug_find_tree (tree top, tree search)
2496 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
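
/* Example of using debug_find_tree from a debugging session (the TOP and
   SEARCH values shown are placeholders):

       (gdb) call debug_find_tree (top, search)

   which evaluates to a nonzero value when SEARCH appears somewhere
   beneath TOP.  */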
2499 /* Declare the variables created by the inliner. Add all the variables in
2500 VARS to BIND_EXPR. */
2502 static void
2503 declare_inline_vars (tree bind_expr, tree vars)
2505 tree t;
2506 for (t = vars; t; t = TREE_CHAIN (t))
2507 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
2509 add_var_to_bind_expr (bind_expr, vars);
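
/* A minimal sketch of declare_inline_vars in use: chain two temporaries
   created for an inlined call site and declare them in the enclosing
   BIND_EXPR.  declare_inline_vars_sketch, TMP1 and TMP2 are hypothetical;
   the VAR_DECLs are assumed to have been built already.  */

static void
declare_inline_vars_sketch (tree bind_expr, tree tmp1, tree tmp2)
{
  TREE_CHAIN (tmp1) = tmp2;
  TREE_CHAIN (tmp2) = NULL_TREE;
  declare_inline_vars (bind_expr, tmp1);
}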