Mark ChangeLog
[official-gcc.git] / gcc / tree-inline.c
blob77f87bb9da81da2f1656f676b27d71c4ae14f8db
1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "integrate.h"
36 #include "varray.h"
37 #include "hashtab.h"
38 #include "pointer-set.h"
39 #include "splay-tree.h"
40 #include "langhooks.h"
41 #include "cgraph.h"
42 #include "intl.h"
43 #include "tree-mudflap.h"
44 #include "tree-flow.h"
45 #include "function.h"
46 #include "diagnostic.h"
47 #include "debug.h"
49 /* I'm not real happy about this, but we need to handle gimple and
50 non-gimple trees. */
51 #include "tree-iterator.h"
52 #include "tree-gimple.h"
/* 0 if we should not perform inlining.
   1 if we should expand function calls inline at the tree level.
   2 if we should consider *all* functions to be inline
   candidates.  */

int flag_inline_trees = 0;

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
73 /* Data required for function inlining. */
75 typedef struct inline_data
77 /* A stack of the functions we are inlining. For example, if we are
78 compiling `f', which calls `g', which calls `h', and we are
79 inlining the body of `h', the stack will contain, `h', followed
80 by `g', followed by `f'. The first few elements of the stack may
81 contain other functions that we know we should not recurse into,
82 even though they are not directly being inlined. */
83 varray_type fns;
84 /* The index of the first element of FNS that really represents an
85 inlined function. */
86 unsigned first_inlined_fn;
87 /* The label to jump to when a return statement is encountered. If
88 this value is NULL, then return statements will simply be
89 remapped as return statements, rather than as jumps. */
90 tree ret_label;
91 /* The VAR_DECL for the return value. */
92 tree retvar;
93 /* The map from local declarations in the inlined function to
94 equivalents in the function into which it is being inlined. */
95 splay_tree decl_map;
96 /* Nonzero if we are currently within the cleanup for a
97 TARGET_EXPR. */
98 int in_target_cleanup_p;
99 /* We use the same mechanism to build clones that we do to perform
100 inlining. However, there are a few places where we need to
101 distinguish between those two situations. This flag is true if
102 we are cloning, rather than inlining. */
103 bool cloning_p;
104 /* Similarly for saving function body. */
105 bool saving_p;
106 /* Hash table used to prevent walk_tree from visiting the same node
107 umpteen million times. */
108 htab_t tree_pruner;
109 /* Callgraph node of function we are inlining into. */
110 struct cgraph_node *node;
111 /* Callgraph node of currently inlined function. */
112 struct cgraph_node *current_node;
113 /* Statement iterator. We need this so we can keep the tree in
114 gimple form when we insert the inlined function. It is not
115 used when we are not dealing with gimple trees. */
116 tree_stmt_iterator tsi;
117 } inline_data;
119 /* Prototypes. */
121 /* The approximate number of instructions per statement. This number
122 need not be particularly accurate; it is used only to make
123 decisions about when a function is too big to inline. */
124 #define INSNS_PER_STMT (10)
126 static tree copy_body_r (tree *, int *, void *);
127 static tree copy_body (inline_data *);
128 static tree expand_call_inline (tree *, int *, void *);
129 static void expand_calls_inline (tree *, inline_data *);
130 static bool inlinable_function_p (tree);
131 static tree remap_decl (tree, inline_data *);
132 static tree remap_type (tree, inline_data *);
133 static tree initialize_inlined_parameters (inline_data *, tree,
134 tree, tree, tree);
135 static void remap_block (tree *, inline_data *);
136 static tree remap_decls (tree, inline_data *);
137 static void copy_bind_expr (tree *, int *, inline_data *);
138 static tree mark_local_for_remap_r (tree *, int *, void *);
139 static void unsave_expr_1 (tree);
140 static tree unsave_r (tree *, int *, void *);
141 static void declare_inline_vars (tree bind_expr, tree vars);
142 static void remap_save_expr (tree *, void *, int *);
144 /* Insert a tree->tree mapping for ID. Despite the name suggests
145 that the trees should be variables, it is used for more than that. */
147 static void
148 insert_decl_map (inline_data *id, tree key, tree value)
150 splay_tree_insert (id->decl_map, (splay_tree_key) key,
151 (splay_tree_value) value);
153 /* Always insert an identity map as well. If we see this same new
154 node again, we won't want to duplicate it a second time. */
155 if (key != value)
156 splay_tree_insert (id->decl_map, (splay_tree_key) value,
157 (splay_tree_value) value);
160 /* Remap DECL during the copying of the BLOCK tree for the function.
161 We are only called to remap local variables in the current function. */
163 static tree
164 remap_decl (tree decl, inline_data *id)
166 splay_tree_node n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
167 tree fn = VARRAY_TOP_TREE (id->fns);
169 /* See if we have remapped this declaration. If we didn't already have an
170 equivalent for this declaration, create one now. */
171 if (!n)
173 /* Make a copy of the variable or label. */
174 tree t = copy_decl_for_inlining (decl, fn, VARRAY_TREE (id->fns, 0));
176 /* Remember it, so that if we encounter this local entity again
177 we can reuse this copy. Do this early because remap_type may
178 need this decl for TYPE_STUB_DECL. */
179 insert_decl_map (id, decl, t);
181 /* Remap types, if necessary. */
182 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
183 if (TREE_CODE (t) == TYPE_DECL)
184 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
185 else if (TREE_CODE (t) == PARM_DECL)
186 DECL_ARG_TYPE_AS_WRITTEN (t)
187 = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);
189 /* Remap sizes as necessary. */
190 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
191 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
193 /* If fields, do likewise for offset and qualifier. */
194 if (TREE_CODE (t) == FIELD_DECL)
196 walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
197 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
198 walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
201 #if 0
202 /* FIXME handle anon aggrs. */
203 if (! DECL_NAME (t) && TREE_TYPE (t)
204 && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
206 /* For a VAR_DECL of anonymous type, we must also copy the
207 member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS. */
208 tree members = NULL;
209 tree src;
211 for (src = DECL_ANON_UNION_ELEMS (t); src;
212 src = TREE_CHAIN (src))
214 tree member = remap_decl (TREE_VALUE (src), id);
216 gcc_assert (!TREE_PURPOSE (src));
217 members = tree_cons (NULL, member, members);
219 DECL_ANON_UNION_ELEMS (t) = nreverse (members);
221 #endif
223 return t;
226 return unshare_expr ((tree) n->value);
229 static tree
230 remap_type (tree type, inline_data *id)
232 splay_tree_node node;
233 tree new, t;
235 if (type == NULL)
236 return type;
238 /* See if we have remapped this type. */
239 node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
240 if (node)
241 return (tree) node->value;
243 /* The type only needs remapping if it's variably modified by a variable
244 in the function we are inlining. */
245 if (! variably_modified_type_p (type, VARRAY_TOP_TREE (id->fns)))
247 insert_decl_map (id, type, type);
248 return type;
251 /* We do need a copy. build and register it now. If this is a pointer or
252 reference type, remap the designated type and make a new pointer or
253 reference type. */
254 if (TREE_CODE (type) == POINTER_TYPE)
256 new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
257 TYPE_MODE (type),
258 TYPE_REF_CAN_ALIAS_ALL (type));
259 insert_decl_map (id, type, new);
260 return new;
262 else if (TREE_CODE (type) == REFERENCE_TYPE)
264 new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
265 TYPE_MODE (type),
266 TYPE_REF_CAN_ALIAS_ALL (type));
267 insert_decl_map (id, type, new);
268 return new;
270 else
271 new = copy_node (type);
273 insert_decl_map (id, type, new);
275 /* This is a new type, not a copy of an old type. Need to reassociate
276 variants. We can handle everything except the main variant lazily. */
277 t = TYPE_MAIN_VARIANT (type);
278 if (type != t)
280 t = remap_type (t, id);
281 TYPE_MAIN_VARIANT (new) = t;
282 TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
283 TYPE_NEXT_VARIANT (t) = new;
285 else
287 TYPE_MAIN_VARIANT (new) = new;
288 TYPE_NEXT_VARIANT (new) = NULL;
291 if (TYPE_STUB_DECL (type))
292 TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);
294 /* Lazily create pointer and reference types. */
295 TYPE_POINTER_TO (new) = NULL;
296 TYPE_REFERENCE_TO (new) = NULL;
298 switch (TREE_CODE (new))
300 case INTEGER_TYPE:
301 case REAL_TYPE:
302 case ENUMERAL_TYPE:
303 case BOOLEAN_TYPE:
304 case CHAR_TYPE:
305 t = TYPE_MIN_VALUE (new);
306 if (t && TREE_CODE (t) != INTEGER_CST)
307 walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
309 t = TYPE_MAX_VALUE (new);
310 if (t && TREE_CODE (t) != INTEGER_CST)
311 walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
312 return new;
314 case FUNCTION_TYPE:
315 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
316 walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
317 return new;
319 case ARRAY_TYPE:
320 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
321 TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
322 break;
324 case RECORD_TYPE:
325 case UNION_TYPE:
326 case QUAL_UNION_TYPE:
327 walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
328 break;
330 case FILE_TYPE:
331 case OFFSET_TYPE:
332 default:
333 /* Shouldn't have been thought variable sized. */
334 gcc_unreachable ();
337 walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
338 walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
340 return new;
343 static tree
344 remap_decls (tree decls, inline_data *id)
346 tree old_var;
347 tree new_decls = NULL_TREE;
349 /* Remap its variables. */
350 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
352 tree new_var;
354 /* Remap the variable. */
355 new_var = remap_decl (old_var, id);
357 /* If we didn't remap this variable, so we can't mess with its
358 TREE_CHAIN. If we remapped this variable to the return slot, it's
359 already declared somewhere else, so don't declare it here. */
360 if (!new_var || new_var == id->retvar)
362 else
364 gcc_assert (DECL_P (new_var));
365 TREE_CHAIN (new_var) = new_decls;
366 new_decls = new_var;
370 return nreverse (new_decls);
373 /* Copy the BLOCK to contain remapped versions of the variables
374 therein. And hook the new block into the block-tree. */
376 static void
377 remap_block (tree *block, inline_data *id)
379 tree old_block;
380 tree new_block;
381 tree fn;
383 /* Make the new block. */
384 old_block = *block;
385 new_block = make_node (BLOCK);
386 TREE_USED (new_block) = TREE_USED (old_block);
387 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
388 *block = new_block;
390 /* Remap its variables. */
391 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
393 fn = VARRAY_TREE (id->fns, 0);
394 #if 1
395 /* FIXME! It shouldn't be so hard to manage blocks. Rebuilding them in
396 rest_of_compilation is a good start. */
397 if (id->cloning_p)
398 /* We're building a clone; DECL_INITIAL is still
399 error_mark_node, and current_binding_level is the parm
400 binding level. */
401 lang_hooks.decls.insert_block (new_block);
402 else
404 /* Attach this new block after the DECL_INITIAL block for the
405 function into which this block is being inlined. In
406 rest_of_compilation we will straighten out the BLOCK tree. */
407 tree *first_block;
408 if (DECL_INITIAL (fn))
409 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
410 else
411 first_block = &DECL_INITIAL (fn);
412 BLOCK_CHAIN (new_block) = *first_block;
413 *first_block = new_block;
415 #endif
416 /* Remember the remapped block. */
417 insert_decl_map (id, old_block, new_block);
420 static void
421 copy_statement_list (tree *tp)
423 tree_stmt_iterator oi, ni;
424 tree new;
426 new = alloc_stmt_list ();
427 ni = tsi_start (new);
428 oi = tsi_start (*tp);
429 *tp = new;
431 for (; !tsi_end_p (oi); tsi_next (&oi))
432 tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
435 static void
436 copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
438 tree block = BIND_EXPR_BLOCK (*tp);
439 /* Copy (and replace) the statement. */
440 copy_tree_r (tp, walk_subtrees, NULL);
441 if (block)
443 remap_block (&block, id);
444 BIND_EXPR_BLOCK (*tp) = block;
447 if (BIND_EXPR_VARS (*tp))
448 /* This will remap a lot of the same decls again, but this should be
449 harmless. */
450 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
453 /* Called from copy_body via walk_tree. DATA is really an `inline_data *'. */
455 static tree
456 copy_body_r (tree *tp, int *walk_subtrees, void *data)
458 inline_data *id = (inline_data *) data;
459 tree fn = VARRAY_TOP_TREE (id->fns);
461 #if 0
462 /* All automatic variables should have a DECL_CONTEXT indicating
463 what function they come from. */
464 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
465 && DECL_NAMESPACE_SCOPE_P (*tp))
466 gcc_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp));
467 #endif
469 /* If this is a RETURN_EXPR, change it into a MODIFY_EXPR and a
470 GOTO_EXPR with the RET_LABEL as its target. */
471 if (TREE_CODE (*tp) == RETURN_EXPR && id->ret_label)
473 tree return_stmt = *tp;
474 tree goto_stmt;
476 /* Build the GOTO_EXPR. */
477 tree assignment = TREE_OPERAND (return_stmt, 0);
478 goto_stmt = build1 (GOTO_EXPR, void_type_node, id->ret_label);
479 TREE_USED (id->ret_label) = 1;
481 /* If we're returning something, just turn that into an
482 assignment into the equivalent of the original
483 RESULT_DECL. */
484 if (assignment)
486 /* Do not create a statement containing a naked RESULT_DECL. */
487 if (TREE_CODE (assignment) == RESULT_DECL)
488 gimplify_stmt (&assignment);
490 *tp = build (BIND_EXPR, void_type_node, NULL, NULL, NULL);
491 append_to_statement_list (assignment, &BIND_EXPR_BODY (*tp));
492 append_to_statement_list (goto_stmt, &BIND_EXPR_BODY (*tp));
494 /* If we're not returning anything just do the jump. */
495 else
496 *tp = goto_stmt;
498 /* Local variables and labels need to be replaced by equivalent
499 variables. We don't want to copy static variables; there's only
500 one of those, no matter how many times we inline the containing
501 function. Similarly for globals from an outer function. */
502 else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
504 tree new_decl;
506 /* Remap the declaration. */
507 new_decl = remap_decl (*tp, id);
508 gcc_assert (new_decl);
509 /* Replace this variable with the copy. */
510 STRIP_TYPE_NOPS (new_decl);
511 *tp = new_decl;
512 *walk_subtrees = 0;
514 else if (TREE_CODE (*tp) == STATEMENT_LIST)
515 copy_statement_list (tp);
516 else if (TREE_CODE (*tp) == SAVE_EXPR)
517 remap_save_expr (tp, id->decl_map, walk_subtrees);
518 else if (TREE_CODE (*tp) == BIND_EXPR)
519 copy_bind_expr (tp, walk_subtrees, id);
520 /* Types may need remapping as well. */
521 else if (TYPE_P (*tp))
522 *tp = remap_type (*tp, id);
524 /* If this is a constant, we have to copy the node iff the type will be
525 remapped. copy_tree_r will not copy a constant. */
526 else if (TREE_CODE_CLASS (TREE_CODE (*tp)) == tcc_constant)
528 tree new_type = remap_type (TREE_TYPE (*tp), id);
530 if (new_type == TREE_TYPE (*tp))
531 *walk_subtrees = 0;
533 else if (TREE_CODE (*tp) == INTEGER_CST)
534 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
535 TREE_INT_CST_HIGH (*tp));
536 else
538 *tp = copy_node (*tp);
539 TREE_TYPE (*tp) = new_type;
543 /* Otherwise, just copy the node. Note that copy_tree_r already
544 knows not to copy VAR_DECLs, etc., so this is safe. */
545 else
547 tree old_node = *tp;
549 if (TREE_CODE (*tp) == MODIFY_EXPR
550 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
551 && (lang_hooks.tree_inlining.auto_var_in_fn_p
552 (TREE_OPERAND (*tp, 0), fn)))
554 /* Some assignments VAR = VAR; don't generate any rtl code
555 and thus don't count as variable modification. Avoid
556 keeping bogosities like 0 = 0. */
557 tree decl = TREE_OPERAND (*tp, 0), value;
558 splay_tree_node n;
560 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
561 if (n)
563 value = (tree) n->value;
564 STRIP_TYPE_NOPS (value);
565 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
567 *tp = build_empty_stmt ();
568 return copy_body_r (tp, walk_subtrees, data);
572 else if (TREE_CODE (*tp) == INDIRECT_REF)
574 /* Get rid of *& from inline substitutions that can happen when a
575 pointer argument is an ADDR_EXPR. */
576 tree decl = TREE_OPERAND (*tp, 0), value;
577 splay_tree_node n;
579 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
580 if (n)
582 value = (tree) n->value;
583 STRIP_NOPS (value);
584 if (TREE_CODE (value) == ADDR_EXPR
585 && (lang_hooks.types_compatible_p
586 (TREE_TYPE (*tp), TREE_TYPE (TREE_OPERAND (value, 0)))))
588 *tp = TREE_OPERAND (value, 0);
589 return copy_body_r (tp, walk_subtrees, data);
594 copy_tree_r (tp, walk_subtrees, NULL);
596 if (TREE_CODE (*tp) == CALL_EXPR && id->node && get_callee_fndecl (*tp))
598 if (id->saving_p)
600 struct cgraph_node *node;
601 struct cgraph_edge *edge;
603 for (node = id->node->next_clone; node; node = node->next_clone)
605 edge = cgraph_edge (node, old_node);
606 gcc_assert (edge);
607 edge->call_expr = *tp;
610 else
612 struct cgraph_edge *edge
613 = cgraph_edge (id->current_node, old_node);
615 if (edge)
616 cgraph_clone_edge (edge, id->node, *tp);
620 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
622 /* The copied TARGET_EXPR has never been expanded, even if the
623 original node was expanded already. */
624 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
626 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
627 TREE_OPERAND (*tp, 3) = NULL_TREE;
630 /* Variable substitution need not be simple. In particular, the
631 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
632 and friends are up-to-date. */
633 else if (TREE_CODE (*tp) == ADDR_EXPR)
635 walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
636 recompute_tree_invarant_for_addr_expr (*tp);
637 *walk_subtrees = 0;
641 /* Keep iterating. */
642 return NULL_TREE;
645 /* Make a copy of the body of FN so that it can be inserted inline in
646 another function. */
648 static tree
649 copy_body (inline_data *id)
651 tree body;
652 tree fndecl = VARRAY_TOP_TREE (id->fns);
654 if (fndecl == current_function_decl
655 && cfun->saved_tree)
656 body = cfun->saved_tree;
657 else
658 body = DECL_SAVED_TREE (fndecl);
659 walk_tree (&body, copy_body_r, id, NULL);
661 return body;
664 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
665 defined in function FN, or of a data member thereof. */
667 static bool
668 self_inlining_addr_expr (tree value, tree fn)
670 tree var;
672 if (TREE_CODE (value) != ADDR_EXPR)
673 return false;
675 var = get_base_address (TREE_OPERAND (value, 0));
677 return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
680 static void
681 setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
682 tree *init_stmts, tree *vars, bool *gimplify_init_stmts_p)
684 tree init_stmt;
685 tree var;
687 /* If the parameter is never assigned to, we may not need to
688 create a new variable here at all. Instead, we may be able
689 to just use the argument value. */
690 if (TREE_READONLY (p)
691 && !TREE_ADDRESSABLE (p)
692 && value && !TREE_SIDE_EFFECTS (value))
694 /* We can't risk substituting complex expressions. They
695 might contain variables that will be assigned to later.
696 Theoretically, we could check the expression to see if
697 all of the variables that determine its value are
698 read-only, but we don't bother. */
699 /* We may produce non-gimple trees by adding NOPs or introduce
700 invalid sharing when operand is not really constant.
701 It is not big deal to prohibit constant propagation here as
702 we will constant propagate in DOM1 pass anyway. */
703 if (is_gimple_min_invariant (value)
704 && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
705 /* We have to be very careful about ADDR_EXPR. Make sure
706 the base variable isn't a local variable of the inlined
707 function, e.g., when doing recursive inlining, direct or
708 mutually-recursive or whatever, which is why we don't
709 just test whether fn == current_function_decl. */
710 && ! self_inlining_addr_expr (value, fn))
712 insert_decl_map (id, p, value);
713 return;
717 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
718 here since the type of this decl must be visible to the calling
719 function. */
720 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
722 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
723 that way, when the PARM_DECL is encountered, it will be
724 automatically replaced by the VAR_DECL. */
725 insert_decl_map (id, p, var);
727 /* Declare this new variable. */
728 TREE_CHAIN (var) = *vars;
729 *vars = var;
731 /* Make gimplifier happy about this variable. */
732 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
734 /* Even if P was TREE_READONLY, the new VAR should not be.
735 In the original code, we would have constructed a
736 temporary, and then the function body would have never
737 changed the value of P. However, now, we will be
738 constructing VAR directly. The constructor body may
739 change its value multiple times as it is being
740 constructed. Therefore, it must not be TREE_READONLY;
741 the back-end assumes that TREE_READONLY variable is
742 assigned to only once. */
743 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
744 TREE_READONLY (var) = 0;
746 /* Initialize this VAR_DECL from the equivalent argument. Convert
747 the argument to the proper type in case it was promoted. */
748 if (value)
750 tree rhs = fold_convert (TREE_TYPE (var), value);
752 if (rhs == error_mark_node)
753 return;
755 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
756 keep our trees in gimple form. */
757 init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
758 append_to_statement_list (init_stmt, init_stmts);
760 /* If we did not create a gimple value and we did not create a gimple
761 cast of a gimple value, then we will need to gimplify INIT_STMTS
762 at the end. Note that is_gimple_cast only checks the outer
763 tree code, not its operand. Thus the explicit check that it's
764 operand is a gimple value. */
765 if (!is_gimple_val (rhs)
766 && (!is_gimple_cast (rhs)
767 || !is_gimple_val (TREE_OPERAND (rhs, 0))))
768 *gimplify_init_stmts_p = true;
772 /* Generate code to initialize the parameters of the function at the
773 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
775 static tree
776 initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
777 tree fn, tree bind_expr)
779 tree init_stmts = NULL_TREE;
780 tree parms;
781 tree a;
782 tree p;
783 tree vars = NULL_TREE;
784 bool gimplify_init_stmts_p = false;
785 int argnum = 0;
787 /* Figure out what the parameters are. */
788 parms = DECL_ARGUMENTS (fn);
789 if (fn == current_function_decl)
790 parms = cfun->saved_args;
792 /* Loop through the parameter declarations, replacing each with an
793 equivalent VAR_DECL, appropriately initialized. */
794 for (p = parms, a = args; p;
795 a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
797 tree value;
799 ++argnum;
801 /* Find the initializer. */
802 value = lang_hooks.tree_inlining.convert_parm_for_inlining
803 (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);
805 setup_one_parameter (id, p, value, fn, &init_stmts, &vars,
806 &gimplify_init_stmts_p);
809 /* Evaluate trailing arguments. */
810 for (; a; a = TREE_CHAIN (a))
812 tree value = TREE_VALUE (a);
813 append_to_statement_list (value, &init_stmts);
816 /* Initialize the static chain. */
817 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
818 if (fn == current_function_decl)
819 p = DECL_STRUCT_FUNCTION (fn)->saved_static_chain_decl;
820 if (p)
822 /* No static chain? Seems like a bug in tree-nested.c. */
823 gcc_assert (static_chain);
825 setup_one_parameter (id, p, static_chain, fn, &init_stmts, &vars,
826 &gimplify_init_stmts_p);
829 if (gimplify_init_stmts_p)
830 gimplify_body (&init_stmts, current_function_decl, false);
832 declare_inline_vars (bind_expr, vars);
833 return init_stmts;
836 /* Declare a return variable to replace the RESULT_DECL for the function we
837 are calling. RETURN_SLOT_ADDR, if non-null, was a fake parameter that
838 took the address of the result. MODIFY_DEST, if non-null, was the LHS of
839 the MODIFY_EXPR to which this call is the RHS.
841 The return value is a (possibly null) value that is the result of the
842 function as seen by the callee. *USE_P is a (possibly null) value that
843 holds the result as seen by the caller. */
845 static tree
846 declare_return_variable (inline_data *id, tree return_slot_addr,
847 tree modify_dest, tree *use_p)
849 tree callee = VARRAY_TOP_TREE (id->fns);
850 tree caller = VARRAY_TREE (id->fns, 0);
851 tree result = DECL_RESULT (callee);
852 tree callee_type = TREE_TYPE (result);
853 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
854 tree var, use;
856 /* We don't need to do anything for functions that don't return
857 anything. */
858 if (!result || VOID_TYPE_P (callee_type))
860 *use_p = NULL_TREE;
861 return NULL_TREE;
864 /* If there was a return slot, then the return value is the
865 dereferenced address of that object. */
866 if (return_slot_addr)
868 /* The front end shouldn't have used both return_slot_addr and
869 a modify expression. */
870 gcc_assert (!modify_dest);
871 if (DECL_BY_REFERENCE (result))
872 var = return_slot_addr;
873 else
874 var = build_fold_indirect_ref (return_slot_addr);
875 use = NULL;
876 goto done;
879 /* All types requiring non-trivial constructors should have been handled. */
880 gcc_assert (!TREE_ADDRESSABLE (callee_type));
882 /* Attempt to avoid creating a new temporary variable. */
883 if (modify_dest)
885 bool use_it = false;
887 /* We can't use MODIFY_DEST if there's type promotion involved. */
888 if (!lang_hooks.types_compatible_p (caller_type, callee_type))
889 use_it = false;
891 /* ??? If we're assigning to a variable sized type, then we must
892 reuse the destination variable, because we've no good way to
893 create variable sized temporaries at this point. */
894 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
895 use_it = true;
897 /* If the callee cannot possibly modify MODIFY_DEST, then we can
898 reuse it as the result of the call directly. Don't do this if
899 it would promote MODIFY_DEST to addressable. */
900 else if (TREE_ADDRESSABLE (result))
901 use_it = false;
902 else
904 tree base_m = get_base_address (modify_dest);
906 /* If the base isn't a decl, then it's a pointer, and we don't
907 know where that's going to go. */
908 if (!DECL_P (base_m))
909 use_it = false;
910 else if (is_global_var (base_m))
911 use_it = false;
912 else if (!TREE_ADDRESSABLE (base_m))
913 use_it = true;
916 if (use_it)
918 var = modify_dest;
919 use = NULL;
920 goto done;
924 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
926 var = copy_decl_for_inlining (result, callee, caller);
927 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
928 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
929 = tree_cons (NULL_TREE, var,
930 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
932 /* Do not have the rest of GCC warn about this variable as it should
933 not be visible to the user. */
934 TREE_NO_WARNING (var) = 1;
936 /* Build the use expr. If the return type of the function was
937 promoted, convert it back to the expected type. */
938 use = var;
939 if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
940 use = fold_convert (caller_type, var);
942 done:
943 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
944 way, when the RESULT_DECL is encountered, it will be
945 automatically replaced by the VAR_DECL. */
946 insert_decl_map (id, result, var);
948 /* Remember this so we can ignore it in remap_decls. */
949 id->retvar = var;
951 *use_p = use;
952 return var;
955 /* Returns nonzero if a function can be inlined as a tree. */
957 bool
958 tree_inlinable_function_p (tree fn)
960 return inlinable_function_p (fn);
/* Diagnostic message set by inline_forbidden_p_1 explaining why a
   function cannot be inlined.  */
static const char *inline_forbidden_reason;
/* Callback for walk_tree, used by inline_forbidden_p.  NODEP points at
   the subtree being visited; FNP is the FUNCTION_DECL whose body is
   being scanned.  If the subtree makes FN impossible to inline, set
   inline_forbidden_reason to a diagnostic format string (taking FN
   twice, for %J and %qF) and return the offending node, which aborts
   the walk.  Return NULL_TREE to keep walking.  */

static tree
inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
		      void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;
  tree t;

  switch (TREE_CODE (node))
    {
    case CALL_EXPR:
      /* Refuse to inline alloca call unless user explicitly forced so as
	 this may change program's memory overhead drastically when the
	 function using alloca is called in loop.  In GCC present in
	 SPEC2000 inlining into schedule_block cause it to require 2GB of
	 RAM instead of 256MB.  */
      if (alloca_call_p (node)
	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	{
	  inline_forbidden_reason
	    = G_("%Jfunction %qF can never be inlined because it uses "
		 "alloca (override using the always_inline attribute)");
	  return node;
	}
      t = get_callee_fndecl (node);
      if (! t)
	break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
	{
	  inline_forbidden_reason
	    = G_("%Jfunction %qF can never be inlined because it uses setjmp");
	  return node;
	}

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (t))
	  {
	    /* We cannot inline functions that take a variable number of
	       arguments.  */
	  case BUILT_IN_VA_START:
	  case BUILT_IN_STDARG_START:
	  case BUILT_IN_NEXT_ARG:
	  case BUILT_IN_VA_END:
	    inline_forbidden_reason
	      = G_("%Jfunction %qF can never be inlined because it "
		   "uses variable argument lists");
	    return node;

	  case BUILT_IN_LONGJMP:
	    /* We can't inline functions that call __builtin_longjmp at
	       all.  The non-local goto machinery really requires the
	       destination be in a different function.  If we allow the
	       function calling __builtin_longjmp to be inlined into the
	       function calling __builtin_setjmp, Things will Go Awry.  */
	    inline_forbidden_reason
	      = G_("%Jfunction %qF can never be inlined because "
		   "it uses setjmp-longjmp exception handling");
	    return node;

	  case BUILT_IN_NONLOCAL_GOTO:
	    /* Similarly.  */
	    inline_forbidden_reason
	      = G_("%Jfunction %qF can never be inlined because "
		   "it uses non-local goto");
	    return node;

	  case BUILT_IN_RETURN:
	  case BUILT_IN_APPLY_ARGS:
	    /* If a __builtin_apply_args caller would be inlined,
	       it would be saving arguments of the function it has
	       been inlined into.  Similarly __builtin_return would
	       return from the function the inline has been inlined into.  */
	    inline_forbidden_reason
	      = G_("%Jfunction %qF can never be inlined because "
		   "it uses __builtin_return or __builtin_apply_args");
	    return node;

	  default:
	    break;
	  }
      break;

    case GOTO_EXPR:
      t = TREE_OPERAND (node, 0);

      /* We will not inline a function which uses computed goto.  The
	 addresses of its local labels, which may be tucked into
	 global storage, are of course not constant across
	 instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
	{
	  inline_forbidden_reason
	    = G_("%Jfunction %qF can never be inlined "
		 "because it contains a computed goto");
	  return node;
	}
      break;

    case LABEL_EXPR:
      t = TREE_OPERAND (node, 0);
      if (DECL_NONLOCAL (t))
	{
	  /* We cannot inline a function that receives a non-local goto
	     because we cannot remap the destination label used in the
	     function that is performing the non-local goto.  */
	  inline_forbidden_reason
	    = G_("%Jfunction %qF can never be inlined "
		 "because it receives a non-local goto");
	  return node;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      /* We cannot inline a function of the form

	   void F (int i) { struct S { int ar[i]; } s; }

	 Attempting to do so produces a catch-22.
	 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
	 UNION_TYPE nodes, then it goes into infinite recursion on a
	 structure containing a pointer to its own type.  If it doesn't,
	 then the type node for S doesn't get adjusted properly when
	 F is inlined, and we abort in find_function_data.

	 ??? This is likely no longer true, but it's too late in the 4.0
	 cycle to try to find out.  This should be checked for 4.1.  */
      for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
	if (variably_modified_type_p (TREE_TYPE (t), NULL))
	  {
	    inline_forbidden_reason
	      = G_("%Jfunction %qF can never be inlined "
		   "because it uses variable sized variables");
	    return node;
	  }

    default:
      break;
    }

  return NULL_TREE;
}
1110 /* Return subexpression representing possible alloca call, if any. */
1111 static tree
1112 inline_forbidden_p (tree fndecl)
1114 location_t saved_loc = input_location;
1115 tree ret = walk_tree_without_duplicates (&DECL_SAVED_TREE (fndecl),
1116 inline_forbidden_p_1, fndecl);
1118 input_location = saved_loc;
1119 return ret;
/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  The verdict is cached in
   DECL_UNINLINABLE, and a warning (or, for always_inline functions, a
   sorry) is issued at most once per function.  */

static bool
inlinable_function_p (tree fn)
{
  bool inlinable = true;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* See if there is any language-specific reason it cannot be
     inlined.  (It is important that this hook be called early because
     in C++ it may result in template instantiation.)
     If the function is not inlinable for language-specific reasons,
     it is left up to the langhook to explain why.  */
  inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);

  /* If we don't have the function body available, we can't inline it.
     However, this should not be recorded since we also get here for
     forward declared inline functions.  Therefore, return at once.  */
  if (!DECL_SAVED_TREE (fn))
    return false;

  /* If we're not inlining at all, then we cannot inline this function.  */
  else if (!flag_inline_trees)
    inlinable = false;

  /* Only try to inline functions if DECL_INLINE is set.  This should be
     true for all functions declared `inline', and for all other functions
     as well with -finline-functions.

     Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
     it's the front-end that must set DECL_INLINE in this case, because
     dwarf2out loses if a function that does not have DECL_INLINE set is
     inlined anyway.  That is why we have both DECL_INLINE and
     DECL_DECLARED_INLINE_P.  */
  /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
     here should be redundant.  */
  else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
    inlinable = false;

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
	 some of these warnings would be issued while trying to expand
	 the function inline, but that would cause multiple warnings
	 about functions that would for example call alloca.  But since
	 this a property of the function, just one warning is enough.
	 As a bonus we can now give more details about the reason why a
	 function is not inlinable.
	 We only warn for functions declared `inline' by the user.  */
      bool do_warning = (warn_inline
			 && DECL_INLINE (fn)
			 && DECL_DECLARED_INLINE_P (fn)
			 && !DECL_IN_SYSTEM_HEADER (fn));

      /* always_inline that cannot be honored is a hard error (sorry),
	 not a mere warning.  */
      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	sorry (inline_forbidden_reason, fn, fn);
      else if (do_warning)
	warning (inline_forbidden_reason, fn, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}
1195 /* Estimate the cost of a memory move. Use machine dependent
1196 word size and take possible memcpy call into account. */
1199 estimate_move_cost (tree type)
1201 HOST_WIDE_INT size;
1203 size = int_size_in_bytes (type);
1205 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
1206 /* Cost of a memcpy call, 3 arguments and the call. */
1207 return 4;
1208 else
1209 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
/* Used by estimate_num_insns.  Estimate number of instructions seen
   by given statement.  Callback for walk_tree: TP is the node being
   visited and DATA points at the running instruction count, which is
   incremented in place.  Always returns NULL (or NULL_TREE) so the
   walk is never aborted.  */

static tree
estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
{
  int *count = data;
  tree x = *tp;

  /* Types and declarations contribute no instructions; don't descend
     into them.  */
  if (IS_TYPE_OR_DECL_P (x))
    {
      *walk_subtrees = 0;
      return NULL;
    }
  /* Assume that constants and references count nothing.  These should
     be majorized by amount of operations among them we count later
     and are common target of CSE and similar optimizations.  */
  else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
    return NULL;

  switch (TREE_CODE (x))
    {
    /* Containers have no cost.  */
    case TREE_LIST:
    case TREE_VEC:
    case BLOCK:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case OBJ_TYPE_REF:
    case EXC_PTR_EXPR: /* ??? */
    case FILTER_EXPR: /* ??? */
    case COMPOUND_EXPR:
    case BIND_EXPR:
    case WITH_CLEANUP_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
    case SAVE_EXPR:
    case ADDR_EXPR:
    case COMPLEX_EXPR:
    case RANGE_EXPR:
    case CASE_LABEL_EXPR:
    case SSA_NAME:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case STATEMENT_LIST:
    case ERROR_MARK:
    case NON_LVALUE_EXPR:
    case FDESC_EXPR:
    case VA_ARG_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case EXIT_EXPR:
    case LOOP_EXPR:
    case PHI_NODE:
    case WITH_SIZE_EXPR:
      break;

    /* We don't account constants for now.  Assume that the cost is amortized
       by operations that do use them.  We may re-consider this decision once
       we are able to optimize the tree before estimating its size and break
       out static initializers.  */
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      *walk_subtrees = 0;
      return NULL;

    /* Try to estimate the cost of assignments.  We have three cases to
       deal with:
	 1) Simple assignments to registers;
	 2) Stores to things that must live in memory.  This includes
	    "normal" stores to scalars, but also assignments of large
	    structures, or constructors of big arrays;
	 3) TARGET_EXPRs.

       Let us look at the first two cases, assuming we have "a = b + C":
       <modify_expr <var_decl "a"> <plus_expr <var_decl "b"> <constant C>>
       If "a" is a GIMPLE register, the assignment to it is free on almost
       any target, because "a" usually ends up in a real register.  Hence
       the only cost of this expression comes from the PLUS_EXPR, and we
       can ignore the MODIFY_EXPR.
       If "a" is not a GIMPLE register, the assignment to "a" will most
       likely be a real store, so the cost of the MODIFY_EXPR is the cost
       of moving something into "a", which we compute using the function
       estimate_move_cost.

       The third case deals with TARGET_EXPRs, for which the semantics are
       that a temporary is assigned, unless the TARGET_EXPR itself is being
       assigned to something else.  In the latter case we do not need the
       temporary.  E.g. in <modify_expr <var_decl "a"> <target_expr>>, the
       MODIFY_EXPR is free.  */
    case INIT_EXPR:
    case MODIFY_EXPR:
      /* Is the right hand side a TARGET_EXPR?  */
      if (TREE_CODE (TREE_OPERAND (x, 1)) == TARGET_EXPR)
	break;
      /* ... fall through ... */

    case TARGET_EXPR:
      x = TREE_OPERAND (x, 0);
      /* Is this an assignment to a register?  */
      if (is_gimple_reg (x))
	break;
      /* Otherwise it's a store, so fall through to compute the move cost.  */

    case CONSTRUCTOR:
      *count += estimate_move_cost (TREE_TYPE (x));
      break;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:

    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONVERT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case SWITCH_EXPR:

    case ASM_EXPR:

    case REALIGN_LOAD_EXPR:

    case RESX_EXPR:
      *count += 1;
      break;

    /* Few special cases of expensive operations.  This is useful
       to avoid inlining on functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      *count += 10;
      break;

    case CALL_EXPR:
      {
	tree decl = get_callee_fndecl (x);
	tree arg;

	/* __builtin_constant_p and __builtin_expect vanish at
	   compile time; charge nothing and skip their operands.  */
	if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (decl))
	    {
	    case BUILT_IN_CONSTANT_P:
	      *walk_subtrees = 0;
	      return NULL_TREE;
	    case BUILT_IN_EXPECT:
	      return NULL_TREE;
	    default:
	      break;
	    }

	/* Our cost must be kept in sync with cgraph_estimate_size_after_inlining
	   that does use function declaration to figure out the arguments.  */
	if (!decl)
	  {
	    for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
	      *count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
	  }
	else
	  {
	    for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	      *count += estimate_move_cost (TREE_TYPE (arg));
	  }

	*count += PARAM_VALUE (PARAM_INLINE_CALL_COST);
	break;
      }
    default:
      /* Abort here so we know we don't miss any nodes.  */
      gcc_unreachable ();
    }
  return NULL;
}
1457 /* Estimate number of instructions that will be created by expanding EXPR. */
1460 estimate_num_insns (tree expr)
1462 int num = 0;
1463 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);
1464 return num;
/* If *TP is a CALL_EXPR, replace it with its inline expansion.
   Callback for walk_tree; DATA is the inline_data for this pass.
   On success the call is replaced in place by a BIND_EXPR holding the
   initialized parameters and the copied callee body, and the
   callgraph is updated.  Always returns NULL_TREE.  */

static tree
expand_call_inline (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id;
  tree t;
  tree expr;
  tree stmt;
  tree use_retvar;
  tree decl;
  tree fn;
  tree arg_inits;
  tree *inlined_body;
  splay_tree st;
  tree args;
  tree return_slot_addr;
  tree modify_dest;
  location_t saved_location;
  struct cgraph_edge *edge;
  const char *reason;

  /* See what we've got.  */
  id = (inline_data *) data;
  t = *tp;

  /* Set input_location here so we get the right instantiation context
     if we call instantiate_decl from inlinable_function_p.  */
  saved_location = input_location;
  if (EXPR_HAS_LOCATION (t))
    input_location = EXPR_LOCATION (t);

  /* Recurse, but letting recursive invocations know that we are
     inside the body of a TARGET_EXPR.  */
  if (TREE_CODE (*tp) == TARGET_EXPR)
    {
#if 0
      int i, len = TREE_CODE_LENGTH (TARGET_EXPR);

      /* We're walking our own subtrees.  */
      *walk_subtrees = 0;

      /* Actually walk over them.  This loop is the body of
	 walk_trees, omitting the case where the TARGET_EXPR
	 itself is handled.  */
      for (i = 0; i < len; ++i)
	{
	  if (i == 2)
	    ++id->in_target_cleanup_p;
	  walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
		     id->tree_pruner);
	  if (i == 2)
	    --id->in_target_cleanup_p;
	}

      goto egress;
#endif
    }

  if (TYPE_P (t))
    /* Because types were not copied in copy_body, CALL_EXPRs beneath
       them should not be expanded.  This can happen if the type is a
       dynamic array type, for example.  */
    *walk_subtrees = 0;

  /* From here on, we're only interested in CALL_EXPRs.  */
  if (TREE_CODE (t) != CALL_EXPR)
    goto egress;

  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  fn = get_callee_fndecl (t);
  if (!fn)
    goto egress;

  /* Turn forward declarations into real ones.  */
  fn = cgraph_node (fn)->decl;

  /* If fn is a declaration of a function in a nested scope that was
     globally declared inline, we don't set its DECL_INITIAL.
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
     C++ front-end uses it for cdtors to refer to their internal
     declarations, that are not real functions.  Fortunately those
     don't have trees to be saved, so we can tell by checking their
     DECL_SAVED_TREE.  */
  if (! DECL_INITIAL (fn)
      && DECL_ABSTRACT_ORIGIN (fn)
      && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
    fn = DECL_ABSTRACT_ORIGIN (fn);

  /* Objective C and fortran still call tree_rest_of_compilation directly.
     Kill this check once this is fixed.  */
  if (!id->current_node->analyzed)
    goto egress;

  edge = cgraph_edge (id->current_node, t);

  /* Constant propagation on argument done during previous inlining
     may create new direct call.  Produce an edge for it.  */
  if (!edge)
    {
      struct cgraph_node *dest = cgraph_node (fn);

      /* We have missing edge in the callgraph.  This can happen in one case
	 where previous inlining turned indirect call into direct call by
	 constant propagating arguments.  In all other cases we hit a bug
	 (incorrect node sharing is most common reason for missing edges).  */
      gcc_assert (dest->needed || !flag_unit_at_a_time);
      cgraph_create_edge (id->node, dest, t)->inline_failed
	= N_("originally indirect function call not considered for inlining");
      goto egress;
    }

  /* Don't try to inline functions that are not well-suited to
     inlining.  */
  if (!cgraph_inline_p (edge, &reason))
    {
      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	{
	  sorry ("%Jinlining failed in call to %qF: %s", fn, fn, reason);
	  sorry ("called from here");
	}
      else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
	       && !DECL_IN_SYSTEM_HEADER (fn)
	       && strlen (reason)
	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn)))
	{
	  warning ("%Jinlining failed in call to %qF: %s", fn, fn, reason);
	  warning ("called from here");
	}
      goto egress;
    }

#ifdef ENABLE_CHECKING
  if (edge->callee->decl != id->node->decl)
    verify_cgraph_node (edge->callee);
#endif

  if (! lang_hooks.tree_inlining.start_inlining (fn))
    goto egress;

  /* Build a block containing code to initialize the arguments, the
     actual inline expansion of the body, and a label for the return
     statements within the function to jump to.  The type of the
     statement expression is the return type of the function call.  */
  stmt = NULL;
  expr = build (BIND_EXPR, void_type_node, NULL_TREE,
		stmt, make_node (BLOCK));
  BLOCK_ABSTRACT_ORIGIN (BIND_EXPR_BLOCK (expr)) = fn;

  /* Local declarations will be replaced by their equivalents in this
     map.  */
  st = id->decl_map;
  id->decl_map = splay_tree_new (splay_tree_compare_pointers,
				 NULL, NULL);

  /* Initialize the parameters.  */
  args = TREE_OPERAND (t, 1);
  return_slot_addr = NULL_TREE;
  if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
    {
      /* The return slot address, if any, is passed as a hidden first
	 argument; strip it off before binding the real parameters.  */
      return_slot_addr = TREE_VALUE (args);
      args = TREE_CHAIN (args);
      TREE_TYPE (expr) = void_type_node;
    }

  arg_inits = initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2),
					     fn, expr);
  if (arg_inits)
    {
      /* Expand any inlined calls in the initializers.  Do this before we
	 push FN on the stack of functions we are inlining; we want to
	 inline calls to FN that appear in the initializers for the
	 parameters.

	 Note we need to save and restore the saved tree statement iterator
	 to avoid having it clobbered by expand_calls_inline.  */
      tree_stmt_iterator save_tsi;

      save_tsi = id->tsi;
      expand_calls_inline (&arg_inits, id);
      id->tsi = save_tsi;

      /* And add them to the tree.  */
      append_to_statement_list (arg_inits, &BIND_EXPR_BODY (expr));
    }

  /* Record the function we are about to inline so that we can avoid
     recursing into it.  */
  VARRAY_PUSH_TREE (id->fns, fn);

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
  DECL_ARTIFICIAL (id->ret_label) = 1;
  DECL_IGNORED_P (id->ret_label) = 1;
  DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
  insert_decl_map (id, id->ret_label, id->ret_label);

  gcc_assert (DECL_INITIAL (fn));
  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);

  /* Find the lhs to which the result of this call is assigned.  */
  modify_dest = tsi_stmt (id->tsi);
  if (TREE_CODE (modify_dest) == MODIFY_EXPR)
    {
      modify_dest = TREE_OPERAND (modify_dest, 0);

      /* The function which we are inlining might not return a value,
	 in which case we should issue a warning that the function
	 does not return a value.  In that case the optimizers will
	 see that the variable to which the value is assigned was not
	 initialized.  We do not want to issue a warning about that
	 uninitialized variable.  */
      if (DECL_P (modify_dest))
	TREE_NO_WARNING (modify_dest) = 1;
    }
  else
    modify_dest = NULL;

  /* Declare the return variable for the function.  */
  decl = declare_return_variable (id, return_slot_addr,
				  modify_dest, &use_retvar);

  /* After we've initialized the parameters, we insert the body of the
     function itself.  */
  {
    struct cgraph_node *old_node = id->current_node;
    tree copy;

    id->current_node = edge->callee;
    copy = copy_body (id);

    /* If the function uses a return slot, then it may legitimately
       fall through while still returning a value, so we have to skip
       the warning here.  */
    if (warn_return_type
	&& !TREE_NO_WARNING (fn)
	&& !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
	&& return_slot_addr == NULL_TREE
	&& block_may_fallthru (copy)
	&& !DECL_IN_SYSTEM_HEADER (fn))
      {
	warning ("control may reach end of non-void function %qD being inlined",
		 fn);
	TREE_NO_WARNING (fn) = 1;
      }

    append_to_statement_list (copy, &BIND_EXPR_BODY (expr));
    id->current_node = old_node;
  }
  inlined_body = &BIND_EXPR_BODY (expr);

  /* After the body of the function comes the RET_LABEL.  This must come
     before we evaluate the returned value below, because that evaluation
     may cause RTL to be generated.  */
  if (TREE_USED (id->ret_label))
    {
      tree label = build1 (LABEL_EXPR, void_type_node, id->ret_label);
      append_to_statement_list (label, &BIND_EXPR_BODY (expr));
    }

  /* Clean up.  */
  splay_tree_delete (id->decl_map);
  id->decl_map = st;

  /* Although, from the semantic viewpoint, the new expression has
     side-effects only if the old one did, it is not possible, from
     the technical viewpoint, to evaluate the body of a function
     multiple times without serious havoc.  */
  TREE_SIDE_EFFECTS (expr) = 1;

  tsi_link_before (&id->tsi, expr, TSI_SAME_STMT);

  /* If the inlined function returns a result that we care about,
     then we're going to need to splice in a MODIFY_EXPR.  Otherwise
     the call was a standalone statement and we can just replace it
     with the BIND_EXPR inline representation of the called function.  */
  if (!use_retvar || !modify_dest)
    *tsi_stmt_ptr (id->tsi) = build_empty_stmt ();
  else
    *tp = use_retvar;

  /* When we gimplify a function call, we may clear TREE_SIDE_EFFECTS on
     the call if it is to a "const" function.  Thus the copy of
     TREE_SIDE_EFFECTS from the CALL_EXPR to the BIND_EXPR above will
     result in TREE_SIDE_EFFECTS not being set for the inlined copy of a
     "const" function.

     Unfortunately, that is wrong as inlining the function can create/expose
     interesting side effects (such as setting of a return value).

     The easiest solution is to simply recalculate TREE_SIDE_EFFECTS for
     the toplevel expression.  */
  recalculate_side_effects (expr);

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (edge->callee->decl);

  /* Update callgraph if needed.  */
  cgraph_remove_node (edge->callee);

  /* Recurse into the body of the just inlined function.  */
  expand_calls_inline (inlined_body, id);
  VARRAY_POP (id->fns);

  /* Don't walk into subtrees.  We've already handled them above.  */
  *walk_subtrees = 0;

  lang_hooks.tree_inlining.end_inlining (fn);

  /* Keep iterating.  */
 egress:
  input_location = saved_location;
  return NULL_TREE;
}
/* Walk the statement tree rooted at *STMT_P, expanding any CALL_EXPRs
   found in statement position into their inline bodies via
   expand_call_inline.  ID carries the state of the current inlining
   pass; id->tsi is kept pointing at the statement being processed so
   expand_call_inline can splice code around it.  */

static void
expand_calls_inline (tree *stmt_p, inline_data *id)
{
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);
  int dummy;

  switch (code)
    {
    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	tree new;

	for (i = tsi_start (stmt); !tsi_end_p (i); )
	  {
	    id->tsi = i;
	    expand_calls_inline (tsi_stmt_ptr (i), id);

	    /* If inlining turned the statement into a STATEMENT_LIST,
	       splice its members in and delete the container so the
	       outer list stays flat.  */
	    new = tsi_stmt (i);
	    if (TREE_CODE (new) == STATEMENT_LIST)
	      {
		tsi_link_before (&i, new, TSI_SAME_STMT);
		tsi_delink (&i);
	      }
	    else
	      tsi_next (&i);
	  }
      }
      break;

    case COND_EXPR:
      expand_calls_inline (&COND_EXPR_THEN (stmt), id);
      expand_calls_inline (&COND_EXPR_ELSE (stmt), id);
      break;

    case CATCH_EXPR:
      expand_calls_inline (&CATCH_BODY (stmt), id);
      break;

    case EH_FILTER_EXPR:
      expand_calls_inline (&EH_FILTER_FAILURE (stmt), id);
      break;

    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
      expand_calls_inline (&TREE_OPERAND (stmt, 0), id);
      expand_calls_inline (&TREE_OPERAND (stmt, 1), id);
      break;

    case BIND_EXPR:
      expand_calls_inline (&BIND_EXPR_BODY (stmt), id);
      break;

    case COMPOUND_EXPR:
      /* We're gimple.  We should have gotten rid of all these.  */
      gcc_unreachable ();

    case RETURN_EXPR:
      /* A value-returning RETURN_EXPR wraps a MODIFY_EXPR; dig down
	 to it and handle it like any other assignment.  */
      stmt_p = &TREE_OPERAND (stmt, 0);
      stmt = *stmt_p;
      if (!stmt || TREE_CODE (stmt) != MODIFY_EXPR)
	break;

      /* FALLTHRU */

    case MODIFY_EXPR:
      /* Only the RHS of an assignment can be an inlinable call; a
	 WITH_SIZE_EXPR may wrap it.  */
      stmt_p = &TREE_OPERAND (stmt, 1);
      stmt = *stmt_p;
      if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
	{
	  stmt_p = &TREE_OPERAND (stmt, 0);
	  stmt = *stmt_p;
	}
      if (TREE_CODE (stmt) != CALL_EXPR)
	break;

      /* FALLTHRU */

    case CALL_EXPR:
      expand_call_inline (stmt_p, &dummy, id);
      break;

    default:
      break;
    }
}
/* Expand calls to inline functions in the body of FN.  Entry point
   for the tree inliner: sets up the inline_data for FN, walks its
   saved tree replacing inlinable calls, and (when checking is
   enabled) verifies the callgraph afterwards.  */

void
optimize_inline_calls (tree fn)
{
  inline_data id;
  tree prev_fn;

  /* There is no point in performing inlining if errors have already
     occurred -- and we might crash if we try to inline invalid
     code.  */
  if (errorcount || sorrycount)
    return;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.current_node = id.node = cgraph_node (fn);
  /* Don't allow recursion into FN.  */
  VARRAY_TREE_INIT (id.fns, 32, "fns");
  VARRAY_PUSH_TREE (id.fns, fn);
  /* Or any functions that aren't finished yet.  */
  prev_fn = NULL_TREE;
  if (current_function_decl)
    {
      VARRAY_PUSH_TREE (id.fns, current_function_decl);
      prev_fn = current_function_decl;
    }

  prev_fn = lang_hooks.tree_inlining.add_pending_fn_decls (&id.fns, prev_fn);

  /* Keep track of the low-water mark, i.e., the point where the first
     real inlining is represented in ID.FNS.  */
  id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);

  /* Replace all calls to inline functions with the bodies of those
     functions.  */
  id.tree_pruner = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
  expand_calls_inline (&DECL_SAVED_TREE (fn), &id);

  /* Clean up.  */
  htab_delete (id.tree_pruner);

#ifdef ENABLE_CHECKING
    {
      struct cgraph_edge *e;

      verify_cgraph_node (id.node);

      /* Double check that we inlined everything we are supposed to inline.  */
      for (e = id.node->callees; e; e = e->next_callee)
	gcc_assert (e->inline_failed);
    }
#endif
}
/* FN is a function that has a complete body, and CLONE is a function whose
   body is to be set to a copy of FN, mapping argument declarations according
   to the ARG_MAP splay_tree.  The copied statements are appended to
   CLONE's existing DECL_SAVED_TREE.  */

void
clone_body (tree clone, tree fn, void *arg_map)
{
  inline_data id;

  /* Clone the body, as if we were making an inline call.  But, remap the
     parameters in the callee to the parameters of caller.  If there's an
     in-charge parameter, map it to an appropriate constant.  */
  memset (&id, 0, sizeof (id));
  VARRAY_TREE_INIT (id.fns, 2, "fns");
  VARRAY_PUSH_TREE (id.fns, clone);
  VARRAY_PUSH_TREE (id.fns, fn);
  /* The caller-supplied map already contains the parameter
     correspondences; copy_body will consult it.  */
  id.decl_map = (splay_tree)arg_map;

  /* Cloning is treated slightly differently from inlining.  Set
     CLONING_P so that it's clear which operation we're performing.  */
  id.cloning_p = true;

  /* Actually copy the body.  */
  append_to_statement_list_force (copy_body (&id), &DECL_SAVED_TREE (clone));
}
/* Make and return duplicate of body in FN.  Put copies of DECL_ARGUMENTS
   in *arg_copy and of the static chain, if any, in *sc_copy.  The
   copied argument decls are chained in place of the originals and
   marked with DECL_ABSTRACT_ORIGIN pointing back at them.  */

tree
save_body (tree fn, tree *arg_copy, tree *sc_copy)
{
  inline_data id;
  tree body, *parg;

  memset (&id, 0, sizeof (id));
  VARRAY_TREE_INIT (id.fns, 1, "fns");
  VARRAY_PUSH_TREE (id.fns, fn);
  id.node = cgraph_node (fn);
  /* SAVING_P tells copy_body we are preserving FN's own body, not
     inlining it elsewhere.  */
  id.saving_p = true;
  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
  *arg_copy = DECL_ARGUMENTS (fn);

  /* Duplicate each parameter decl, recording original -> copy in the
     decl map so copy_body rewrites references.  */
  for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
    {
      tree new = copy_node (*parg);

      lang_hooks.dup_lang_specific_decl (new);
      DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
      insert_decl_map (&id, *parg, new);
      TREE_CHAIN (new) = TREE_CHAIN (*parg);
      *parg = new;
    }

  /* Likewise for the static chain decl, if the function has one.  */
  *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  if (*sc_copy)
    {
      tree new = copy_node (*sc_copy);

      lang_hooks.dup_lang_specific_decl (new);
      DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
      insert_decl_map (&id, *sc_copy, new);
      TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
      *sc_copy = new;
    }

  /* The result decl maps to itself; it is shared with the original.  */
  insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));

  /* Actually copy the body.  */
  body = copy_body (&id);

  /* Clean up.  */
  splay_tree_delete (id.decl_map);
  return body;
}
/* Helper for walk_type_fields (and walk_tree below): walk NODE with
   the current FUNC/DATA/PSET, and if the walk produced a non-NULL
   result -- i.e. the traversal was aborted -- propagate it to our
   caller immediately.  */
#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = walk_tree (&(NODE), func, data, pset);	\
      if (result)					\
	return result;					\
    }							\
  while (0)
/* This is a subroutine of walk_tree that walks field of TYPE that are to
   be walked whenever a type is seen in the tree.  Rest of operands and return
   value are as for walk_tree.  */

static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
		  struct pointer_set_t *pset)
{
  tree result = NULL_TREE;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
	 be written in C.  They can in Ada.  It's pathological, but
	 there's an ACATS test (c38102a) that checks it.  Deal with this
	 by checking if we're pointing to another pointer, that one
	 points to another pointer, that one does too, and we have no htab.
	 If so, get a hash table.  We check three levels deep to avoid
	 the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
	  && !pset)
	{
	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
						 func, data);
	  if (result)
	    return result;

	  break;
	}

      /* ... fall through ... */

    case COMPLEX_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      {
	tree arg;

	/* We never want to walk into default arguments.  */
	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
	  WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this node's type if a pointer for fear that we'll
	 have infinite recursion.  Those types are uninteresting anyway.  */
      if (!POINTER_TYPE_P (TREE_TYPE (type))
	  && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE)
	WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case BOOLEAN_TYPE:
    case ENUMERAL_TYPE:
    case INTEGER_TYPE:
    case CHAR_TYPE:
    case REAL_TYPE:
      WALK_SUBTREE (TYPE_MIN_VALUE (type));
      WALK_SUBTREE (TYPE_MAX_VALUE (type));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
2102 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
2103 called with the DATA and the address of each sub-tree. If FUNC returns a
2104 non-NULL value, the traversal is aborted, and the value returned by FUNC
2105 is returned. If PSET is non-NULL it is used to record the nodes visited,
2106 and to avoid visiting a node more than once. */
2108 tree
2109 walk_tree (tree *tp, walk_tree_fn func, void *data, struct pointer_set_t *pset)
2111 enum tree_code code;
2112 int walk_subtrees;
2113 tree result;
2115 #define WALK_SUBTREE_TAIL(NODE) \
2116 do \
2118 tp = & (NODE); \
2119 goto tail_recurse; \
2121 while (0)
2123 tail_recurse:
2124 /* Skip empty subtrees. */
2125 if (!*tp)
2126 return NULL_TREE;
2128 /* Don't walk the same tree twice, if the user has requested
2129 that we avoid doing so. */
2130 if (pset && pointer_set_insert (pset, *tp))
2131 return NULL_TREE;
2133 /* Call the function. */
2134 walk_subtrees = 1;
2135 result = (*func) (tp, &walk_subtrees, data);
2137 /* If we found something, return it. */
2138 if (result)
2139 return result;
2141 code = TREE_CODE (*tp);
2143 /* Even if we didn't, FUNC may have decided that there was nothing
2144 interesting below this point in the tree. */
2145 if (!walk_subtrees)
2147 if (code == TREE_LIST)
2148 /* But we still need to check our siblings. */
2149 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
2150 else
2151 return NULL_TREE;
2154 result = lang_hooks.tree_inlining.walk_subtrees (tp, &walk_subtrees, func,
2155 data, pset);
2156 if (result || ! walk_subtrees)
2157 return result;
2159 /* If this is a DECL_EXPR, walk into various fields of the type that it's
2160 defining. We only want to walk into these fields of a type in this
2161 case. Note that decls get walked as part of the processing of a
2162 BIND_EXPR.
2164 ??? Precisely which fields of types that we are supposed to walk in
2165 this case vs. the normal case aren't well defined. */
2166 if (code == DECL_EXPR
2167 && TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL
2168 && TREE_CODE (TREE_TYPE (DECL_EXPR_DECL (*tp))) != ERROR_MARK)
2170 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
2172 /* Call the function for the type. See if it returns anything or
2173 doesn't want us to continue. If we are to continue, walk both
2174 the normal fields and those for the declaration case. */
2175 result = (*func) (type_p, &walk_subtrees, data);
2176 if (result || !walk_subtrees)
2177 return NULL_TREE;
2179 result = walk_type_fields (*type_p, func, data, pset);
2180 if (result)
2181 return result;
2183 WALK_SUBTREE (TYPE_SIZE (*type_p));
2184 WALK_SUBTREE (TYPE_SIZE_UNIT (*type_p));
2186 /* If this is a record type, also walk the fields. */
2187 if (TREE_CODE (*type_p) == RECORD_TYPE
2188 || TREE_CODE (*type_p) == UNION_TYPE
2189 || TREE_CODE (*type_p) == QUAL_UNION_TYPE)
2191 tree field;
2193 for (field = TYPE_FIELDS (*type_p); field;
2194 field = TREE_CHAIN (field))
2196 /* We'd like to look at the type of the field, but we can easily
2197 get infinite recursion. So assume it's pointed to elsewhere
2198 in the tree. Also, ignore things that aren't fields. */
2199 if (TREE_CODE (field) != FIELD_DECL)
2200 continue;
2202 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
2203 WALK_SUBTREE (DECL_SIZE (field));
2204 WALK_SUBTREE (DECL_SIZE_UNIT (field));
2205 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
2206 WALK_SUBTREE (DECL_QUALIFIER (field));
2211 else if (code != SAVE_EXPR
2212 && code != BIND_EXPR
2213 && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2215 int i, len;
2217 /* Walk over all the sub-trees of this operand. */
2218 len = TREE_CODE_LENGTH (code);
2219 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
2220 But, we only want to walk once. */
2221 if (code == TARGET_EXPR
2222 && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
2223 --len;
2225 /* Go through the subtrees. We need to do this in forward order so
2226 that the scope of a FOR_EXPR is handled properly. */
2227 #ifdef DEBUG_WALK_TREE
2228 for (i = 0; i < len; ++i)
2229 WALK_SUBTREE (TREE_OPERAND (*tp, i));
2230 #else
2231 for (i = 0; i < len - 1; ++i)
2232 WALK_SUBTREE (TREE_OPERAND (*tp, i));
2234 if (len)
2236 /* The common case is that we may tail recurse here. */
2237 if (code != BIND_EXPR
2238 && !TREE_CHAIN (*tp))
2239 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
2240 else
2241 WALK_SUBTREE (TREE_OPERAND (*tp, len - 1));
2243 #endif
2246 /* If this is a type, walk the needed fields in the type. */
2247 else if (TYPE_P (*tp))
2249 result = walk_type_fields (*tp, func, data, pset);
2250 if (result)
2251 return result;
2253 else
2255 /* Not one of the easy cases. We must explicitly go through the
2256 children. */
2257 switch (code)
2259 case ERROR_MARK:
2260 case IDENTIFIER_NODE:
2261 case INTEGER_CST:
2262 case REAL_CST:
2263 case VECTOR_CST:
2264 case STRING_CST:
2265 case BLOCK:
2266 case PLACEHOLDER_EXPR:
2267 case SSA_NAME:
2268 case FIELD_DECL:
2269 case RESULT_DECL:
2270 /* None of thse have subtrees other than those already walked
2271 above. */
2272 break;
2274 case TREE_LIST:
2275 WALK_SUBTREE (TREE_VALUE (*tp));
2276 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
2277 break;
2279 case TREE_VEC:
2281 int len = TREE_VEC_LENGTH (*tp);
2283 if (len == 0)
2284 break;
2286 /* Walk all elements but the first. */
2287 while (--len)
2288 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
2290 /* Now walk the first one as a tail call. */
2291 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
2294 case COMPLEX_CST:
2295 WALK_SUBTREE (TREE_REALPART (*tp));
2296 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
2298 case CONSTRUCTOR:
2299 WALK_SUBTREE_TAIL (CONSTRUCTOR_ELTS (*tp));
2301 case SAVE_EXPR:
2302 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
2304 case BIND_EXPR:
2306 tree decl;
2307 for (decl = BIND_EXPR_VARS (*tp); decl; decl = TREE_CHAIN (decl))
2309 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
2310 into declarations that are just mentioned, rather than
2311 declared; they don't really belong to this part of the tree.
2312 And, we can see cycles: the initializer for a declaration
2313 can refer to the declaration itself. */
2314 WALK_SUBTREE (DECL_INITIAL (decl));
2315 WALK_SUBTREE (DECL_SIZE (decl));
2316 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
2318 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
2321 case STATEMENT_LIST:
2323 tree_stmt_iterator i;
2324 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
2325 WALK_SUBTREE (*tsi_stmt_ptr (i));
2327 break;
2329 default:
2330 /* ??? This could be a language-defined node. We really should make
2331 a hook for it, but right now just ignore it. */
2332 break;
2336 /* We didn't find what we were looking for. */
2337 return NULL_TREE;
2339 #undef WALK_SUBTREE
2340 #undef WALK_SUBTREE_TAIL
2343 /* Like walk_tree, but does not walk duplicate nodes more than once. */
2345 tree
2346 walk_tree_without_duplicates (tree *tp, walk_tree_fn func, void *data)
2348 tree result;
2349 struct pointer_set_t *pset;
2351 pset = pointer_set_create ();
2352 result = walk_tree (tp, func, data, pset);
2353 pointer_set_destroy (pset);
2354 return result;
2357 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2359 tree
2360 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2362 enum tree_code code = TREE_CODE (*tp);
2364 /* We make copies of most nodes. */
2365 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
2366 || code == TREE_LIST
2367 || code == TREE_VEC
2368 || code == TYPE_DECL)
2370 /* Because the chain gets clobbered when we make a copy, we save it
2371 here. */
2372 tree chain = TREE_CHAIN (*tp);
2373 tree new;
2375 /* Copy the node. */
2376 new = copy_node (*tp);
2378 /* Propagate mudflap marked-ness. */
2379 if (flag_mudflap && mf_marked_p (*tp))
2380 mf_mark (new);
2382 *tp = new;
2384 /* Now, restore the chain, if appropriate. That will cause
2385 walk_tree to walk into the chain as well. */
2386 if (code == PARM_DECL || code == TREE_LIST)
2387 TREE_CHAIN (*tp) = chain;
2389 /* For now, we don't update BLOCKs when we make copies. So, we
2390 have to nullify all BIND_EXPRs. */
2391 if (TREE_CODE (*tp) == BIND_EXPR)
2392 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2395 else if (TREE_CODE_CLASS (code) == tcc_type)
2396 *walk_subtrees = 0;
2397 else if (TREE_CODE_CLASS (code) == tcc_declaration)
2398 *walk_subtrees = 0;
2399 else if (TREE_CODE_CLASS (code) == tcc_constant)
2400 *walk_subtrees = 0;
2401 else
2402 gcc_assert (code != STATEMENT_LIST);
2403 return NULL_TREE;
2406 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2407 information indicating to what new SAVE_EXPR this one should be mapped,
2408 use that one. Otherwise, create a new node and enter it in ST. */
2410 static void
2411 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2413 splay_tree st = (splay_tree) st_;
2414 splay_tree_node n;
2415 tree t;
2417 /* See if we already encountered this SAVE_EXPR. */
2418 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2420 /* If we didn't already remap this SAVE_EXPR, do so now. */
2421 if (!n)
2423 t = copy_node (*tp);
2425 /* Remember this SAVE_EXPR. */
2426 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2427 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2428 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2430 else
2432 /* We've already walked into this SAVE_EXPR; don't do it again. */
2433 *walk_subtrees = 0;
2434 t = (tree) n->value;
2437 /* Replace this SAVE_EXPR with the copy. */
2438 *tp = t;
2441 /* Called via walk_tree. If *TP points to a DECL_STMT for a local label,
2442 copies the declaration and enters it in the splay_tree in DATA (which is
2443 really an `inline_data *'). */
2445 static tree
2446 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
2447 void *data)
2449 inline_data *id = (inline_data *) data;
2451 /* Don't walk into types. */
2452 if (TYPE_P (*tp))
2453 *walk_subtrees = 0;
2455 else if (TREE_CODE (*tp) == LABEL_EXPR)
2457 tree decl = TREE_OPERAND (*tp, 0);
2459 /* Copy the decl and remember the copy. */
2460 insert_decl_map (id, decl,
2461 copy_decl_for_inlining (decl, DECL_CONTEXT (decl),
2462 DECL_CONTEXT (decl)));
2465 return NULL_TREE;
2468 /* Perform any modifications to EXPR required when it is unsaved. Does
2469 not recurse into EXPR's subtrees. */
2471 static void
2472 unsave_expr_1 (tree expr)
2474 switch (TREE_CODE (expr))
2476 case TARGET_EXPR:
2477 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
2478 It's OK for this to happen if it was part of a subtree that
2479 isn't immediately expanded, such as operand 2 of another
2480 TARGET_EXPR. */
2481 if (TREE_OPERAND (expr, 1))
2482 break;
2484 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
2485 TREE_OPERAND (expr, 3) = NULL_TREE;
2486 break;
2488 default:
2489 break;
2493 /* Called via walk_tree when an expression is unsaved. Using the
2494 splay_tree pointed to by ST (which is really a `splay_tree'),
2495 remaps all local declarations to appropriate replacements. */
2497 static tree
2498 unsave_r (tree *tp, int *walk_subtrees, void *data)
2500 inline_data *id = (inline_data *) data;
2501 splay_tree st = id->decl_map;
2502 splay_tree_node n;
2504 /* Only a local declaration (variable or label). */
2505 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
2506 || TREE_CODE (*tp) == LABEL_DECL)
2508 /* Lookup the declaration. */
2509 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2511 /* If it's there, remap it. */
2512 if (n)
2513 *tp = (tree) n->value;
2516 else if (TREE_CODE (*tp) == STATEMENT_LIST)
2517 copy_statement_list (tp);
2518 else if (TREE_CODE (*tp) == BIND_EXPR)
2519 copy_bind_expr (tp, walk_subtrees, id);
2520 else if (TREE_CODE (*tp) == SAVE_EXPR)
2521 remap_save_expr (tp, st, walk_subtrees);
2522 else
2524 copy_tree_r (tp, walk_subtrees, NULL);
2526 /* Do whatever unsaving is required. */
2527 unsave_expr_1 (*tp);
2530 /* Keep iterating. */
2531 return NULL_TREE;
2534 /* Copies everything in EXPR and replaces variables, labels
2535 and SAVE_EXPRs local to EXPR. */
2537 tree
2538 unsave_expr_now (tree expr)
2540 inline_data id;
2542 /* There's nothing to do for NULL_TREE. */
2543 if (expr == 0)
2544 return expr;
2546 /* Set up ID. */
2547 memset (&id, 0, sizeof (id));
2548 VARRAY_TREE_INIT (id.fns, 1, "fns");
2549 VARRAY_PUSH_TREE (id.fns, current_function_decl);
2550 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2552 /* Walk the tree once to find local labels. */
2553 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
2555 /* Walk the tree again, copying, remapping, and unsaving. */
2556 walk_tree (&expr, unsave_r, &id, NULL);
2558 /* Clean up. */
2559 splay_tree_delete (id.decl_map);
2561 return expr;
2564 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
2566 static tree
2567 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
2569 if (*tp == data)
2570 return (tree) data;
2571 else
2572 return NULL;
2575 bool
2576 debug_find_tree (tree top, tree search)
2578 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
/* Declare the variables created by the inliner.  Add all the variables in
   VARS to BIND_EXPR.  */

static void
declare_inline_vars (tree bind_expr, tree vars)
{
  tree t;
  /* Flag each variable as already present in a BIND_EXPR.
     NOTE(review): the consumers of DECL_SEEN_IN_BIND_EXPR_P are outside
     this chunk -- presumably this prevents the decl from being added to
     another BIND_EXPR later; confirm against the flag's users.  */
  for (t = vars; t; t = TREE_CHAIN (t))
    DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

  /* Attach the whole VARS chain to BIND_EXPR.  */
  add_var_to_bind_expr (bind_expr, vars);
}