gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "integrate.h"
36 #include "varray.h"
37 #include "hashtab.h"
38 #include "splay-tree.h"
39 #include "langhooks.h"
40 #include "cgraph.h"
41 #include "intl.h"
42 #include "tree-mudflap.h"
43 #include "function.h"
44 #include "diagnostic.h"
46 /* I'm not real happy about this, but we need to handle gimple and
47 non-gimple trees. */
48 #include "tree-iterator.h"
49 #include "tree-gimple.h"
51 /* 0 if we should not perform inlining.
52 1 if we should expand function calls inline at the tree level.
53 2 if we should consider *all* functions to be inline
54 candidates. */
56 int flag_inline_trees = 0;
58 /* To Do:
60 o In order to make inlining-on-trees work, we pessimized
61 function-local static constants. In particular, they are now
62 always output, even when not addressed. Fix this by treating
63 function-local static constants just like global static
64 constants; the back-end already knows not to output them if they
65 are not needed.
67 o Provide heuristics to clamp inlining of recursive template
68 calls? */
70 /* Data required for function inlining. */
72 typedef struct inline_data
74 /* A stack of the functions we are inlining. For example, if we are
75 compiling `f', which calls `g', which calls `h', and we are
76 inlining the body of `h', the stack will contain `h', followed
77 by `g', followed by `f'. The first few elements of the stack may
78 contain other functions that we know we should not recurse into,
79 even though they are not directly being inlined. */
80 varray_type fns;
81 /* The index of the first element of FNS that really represents an
82 inlined function. */
83 unsigned first_inlined_fn;
84 /* The label to jump to when a return statement is encountered. If
85 this value is NULL, then return statements will simply be
86 remapped as return statements, rather than as jumps. */
87 tree ret_label;
88 /* The VAR_DECL for the return value. */
89 tree retvar;
90 /* The map from local declarations in the inlined function to
91 equivalents in the function into which it is being inlined. */
92 splay_tree decl_map;
93 /* Nonzero if we are currently within the cleanup for a
94 TARGET_EXPR. */
95 int in_target_cleanup_p;
96 /* A list of the functions the current function has inlined. */
97 varray_type inlined_fns;
98 /* We use the same mechanism to build clones that we do to perform
99 inlining. However, there are a few places where we need to
100 distinguish between those two situations. This flag is true if
101 we are cloning, rather than inlining. */
102 bool cloning_p;
103 /* Similarly for saving function body. */
104 bool saving_p;
105 /* Hash table used to prevent walk_tree from visiting the same node
106 umpteen million times. */
107 htab_t tree_pruner;
108 /* Callgraph node of function we are inlining into. */
109 struct cgraph_node *node;
110 /* Callgraph node of currently inlined function. */
111 struct cgraph_node *current_node;
112 /* Statement iterator. We need this so we can keep the tree in
113 gimple form when we insert the inlined function. It is not
114 used when we are not dealing with gimple trees. */
115 tree_stmt_iterator tsi;
116 } inline_data;
118 /* Prototypes. */
120 /* The approximate number of instructions per statement. This number
121 need not be particularly accurate; it is used only to make
122 decisions about when a function is too big to inline. */
123 #define INSNS_PER_STMT (10)
125 static tree copy_body_r (tree *, int *, void *);
126 static tree copy_body (inline_data *);
127 static tree expand_call_inline (tree *, int *, void *);
128 static void expand_calls_inline (tree *, inline_data *);
129 static bool inlinable_function_p (tree);
130 static tree remap_decl (tree, inline_data *);
131 static tree remap_type (tree, inline_data *);
132 static tree initialize_inlined_parameters (inline_data *, tree,
133 tree, tree, tree);
134 static void remap_block (tree *, inline_data *);
135 static tree remap_decls (tree, inline_data *);
136 static void copy_bind_expr (tree *, int *, inline_data *);
137 static tree mark_local_for_remap_r (tree *, int *, void *);
138 static tree unsave_r (tree *, int *, void *);
139 static void declare_inline_vars (tree bind_expr, tree vars);
141 /* Insert a tree->tree mapping for ID.  Although the name suggests
142 that the trees should be variables, it is used for more than that.  */
144 static void
145 insert_decl_map (inline_data *id, tree key, tree value)
147 splay_tree_insert (id->decl_map, (splay_tree_key) key,
148 (splay_tree_value) value);
150 /* Always insert an identity map as well. If we see this same new
151 node again, we won't want to duplicate it a second time. */
152 if (key != value)
153 splay_tree_insert (id->decl_map, (splay_tree_key) value,
154 (splay_tree_value) value);
157 /* Remap DECL during the copying of the BLOCK tree for the function.
158 We are only called to remap local variables in the current function. */
160 static tree
161 remap_decl (tree decl, inline_data *id)
163 splay_tree_node n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
164 tree fn = VARRAY_TOP_TREE (id->fns);
166 /* See if we have remapped this declaration. If we didn't already have an
167 equivalent for this declaration, create one now. */
168 if (!n)
170 /* Make a copy of the variable or label. */
171 tree t = copy_decl_for_inlining (decl, fn, VARRAY_TREE (id->fns, 0));
173 /* Remap types, if necessary. */
174 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
175 if (TREE_CODE (t) == TYPE_DECL)
176 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
177 else if (TREE_CODE (t) == PARM_DECL)
178 DECL_ARG_TYPE_AS_WRITTEN (t)
179 = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);
181 /* Remap sizes as necessary. */
182 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
183 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
185 /* If fields, do likewise for offset and qualifier. */
186 if (TREE_CODE (t) == FIELD_DECL)
188 walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
189 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
190 walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
193 #if 0
194 /* FIXME handle anon aggrs. */
195 if (! DECL_NAME (t) && TREE_TYPE (t)
196 && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
198 /* For a VAR_DECL of anonymous type, we must also copy the
199 member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS. */
200 tree members = NULL;
201 tree src;
203 for (src = DECL_ANON_UNION_ELEMS (t); src;
204 src = TREE_CHAIN (src))
206 tree member = remap_decl (TREE_VALUE (src), id);
208 if (TREE_PURPOSE (src))
209 abort ();
210 members = tree_cons (NULL, member, members);
212 DECL_ANON_UNION_ELEMS (t) = nreverse (members);
214 #endif
216 /* Remember it, so that if we encounter this local entity
217 again we can reuse this copy. */
218 insert_decl_map (id, decl, t);
219 return t;
222 return unshare_expr ((tree) n->value);
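/* Remap TYPE for use in the body being copied.  A type that is not
   variably modified by a variable in the function being inlined is
   mapped to itself; a variably-modified type gets a fresh copy whose
   size and bound expressions are in turn remapped via copy_body_r.  */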
225 static tree
226 remap_type (tree type, inline_data *id)
228 splay_tree_node node;
229 tree new, t;
231 if (type == NULL)
232 return type;
234 /* See if we have remapped this type. */
235 node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
236 if (node)
237 return (tree) node->value;
239 /* The type only needs remapping if it's variably modified by a variable
240 in the function we are inlining. */
241 if (! variably_modified_type_p (type, VARRAY_TOP_TREE (id->fns)))
243 insert_decl_map (id, type, type);
244 return type;
247 /* We do need a copy.  Build and register it now.  If this is a pointer or
248 reference type, remap the designated type and make a new pointer or
249 reference type. */
250 if (TREE_CODE (type) == POINTER_TYPE)
252 new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
253 TYPE_MODE (type),
254 TYPE_REF_CAN_ALIAS_ALL (type));
255 insert_decl_map (id, type, new);
256 return new;
258 else if (TREE_CODE (type) == REFERENCE_TYPE)
260 new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
261 TYPE_MODE (type),
262 TYPE_REF_CAN_ALIAS_ALL (type));
263 insert_decl_map (id, type, new);
264 return new;
266 else
267 new = copy_node (type);
269 insert_decl_map (id, type, new);
271 /* This is a new type, not a copy of an old type. Need to reassociate
272 variants. We can handle everything except the main variant lazily. */
273 t = TYPE_MAIN_VARIANT (type);
274 if (type != t)
276 t = remap_type (t, id);
277 TYPE_MAIN_VARIANT (new) = t;
278 TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
279 TYPE_NEXT_VARIANT (t) = new;
281 else
283 TYPE_MAIN_VARIANT (new) = new;
284 TYPE_NEXT_VARIANT (new) = NULL;
287 /* Lazily create pointer and reference types. */
288 TYPE_POINTER_TO (new) = NULL;
289 TYPE_REFERENCE_TO (new) = NULL;
291 switch (TREE_CODE (new))
293 case INTEGER_TYPE:
294 case REAL_TYPE:
295 case ENUMERAL_TYPE:
296 case BOOLEAN_TYPE:
297 case CHAR_TYPE:
298 t = TYPE_MIN_VALUE (new);
299 if (t && TREE_CODE (t) != INTEGER_CST)
300 walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
302 t = TYPE_MAX_VALUE (new);
303 if (t && TREE_CODE (t) != INTEGER_CST)
304 walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
305 return new;
307 case FUNCTION_TYPE:
308 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
309 walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
310 return new;
312 case ARRAY_TYPE:
313 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
314 TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
315 break;
317 case RECORD_TYPE:
318 case UNION_TYPE:
319 case QUAL_UNION_TYPE:
320 walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
321 break;
323 case FILE_TYPE:
324 case SET_TYPE:
325 case OFFSET_TYPE:
326 default:
327 /* Shouldn't have been thought variable sized. */
328 abort ();
331 walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
332 walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
334 return new;
337 static tree
338 remap_decls (tree decls, inline_data *id)
340 tree old_var;
341 tree new_decls = NULL_TREE;
343 /* Remap its variables. */
344 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
346 tree new_var;
348 /* Remap the variable. */
349 new_var = remap_decl (old_var, id);
351 /* If we didn't remap this variable, we can't mess with its
352 TREE_CHAIN.  If we remapped this variable to the return slot, it's
353 already declared somewhere else, so don't declare it here.  */
354 if (!new_var || new_var == id->retvar)
356 #ifdef ENABLE_CHECKING
357 else if (!DECL_P (new_var))
358 abort ();
359 #endif
360 else
362 TREE_CHAIN (new_var) = new_decls;
363 new_decls = new_var;
367 return nreverse (new_decls);
370 /* Copy the BLOCK to contain remapped versions of the variables
371 therein. And hook the new block into the block-tree. */
373 static void
374 remap_block (tree *block, inline_data *id)
376 tree old_block;
377 tree new_block;
378 tree fn;
380 /* Make the new block. */
381 old_block = *block;
382 new_block = make_node (BLOCK);
383 TREE_USED (new_block) = TREE_USED (old_block);
384 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
385 *block = new_block;
387 /* Remap its variables. */
388 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
390 fn = VARRAY_TREE (id->fns, 0);
391 #if 1
392 /* FIXME! It shouldn't be so hard to manage blocks. Rebuilding them in
393 rest_of_compilation is a good start. */
394 if (id->cloning_p)
395 /* We're building a clone; DECL_INITIAL is still
396 error_mark_node, and current_binding_level is the parm
397 binding level. */
398 lang_hooks.decls.insert_block (new_block);
399 else
401 /* Attach this new block after the DECL_INITIAL block for the
402 function into which this block is being inlined. In
403 rest_of_compilation we will straighten out the BLOCK tree. */
404 tree *first_block;
405 if (DECL_INITIAL (fn))
406 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
407 else
408 first_block = &DECL_INITIAL (fn);
409 BLOCK_CHAIN (new_block) = *first_block;
410 *first_block = new_block;
412 #endif
413 /* Remember the remapped block. */
414 insert_decl_map (id, old_block, new_block);
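/* Make a shallow copy of the STATEMENT_LIST at *TP: a new list is built
   that initially shares the same statement nodes; the statements
   themselves are copied in place when walk_tree recurses into the new
   list.  */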
417 static void
418 copy_statement_list (tree *tp)
420 tree_stmt_iterator oi, ni;
421 tree new;
423 new = alloc_stmt_list ();
424 ni = tsi_start (new);
425 oi = tsi_start (*tp);
426 *tp = new;
428 for (; !tsi_end_p (oi); tsi_next (&oi))
429 tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
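/* Copy the BIND_EXPR at *TP, remapping its BLOCK and re-declaring its
   local variables via remap_decls.  */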
432 static void
433 copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
435 tree block = BIND_EXPR_BLOCK (*tp);
436 /* Copy (and replace) the statement. */
437 copy_tree_r (tp, walk_subtrees, NULL);
438 if (block)
440 remap_block (&block, id);
441 BIND_EXPR_BLOCK (*tp) = block;
444 if (BIND_EXPR_VARS (*tp))
445 /* This will remap a lot of the same decls again, but this should be
446 harmless. */
447 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
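/* An illustration of the main rewrite performed below (a hand-waved
   source-level view, not literal GIMPLE produced here): when the body of

       int sq (int x) { return x * x; }

   is copied into a caller containing "y = sq (3);", the PARM_DECL and
   any local declarations are replaced by fresh VAR_DECLs, and the
   RETURN_EXPR becomes an assignment to the return variable followed by
   a jump to ID->RET_LABEL, roughly

       retval = x' * x';
       goto ret_label;

   after which the caller reads "y" back from the return variable.  */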
450 /* Called from copy_body via walk_tree. DATA is really an `inline_data *'. */
452 static tree
453 copy_body_r (tree *tp, int *walk_subtrees, void *data)
455 inline_data *id = (inline_data *) data;
456 tree fn = VARRAY_TOP_TREE (id->fns);
458 #if 0
459 /* All automatic variables should have a DECL_CONTEXT indicating
460 what function they come from. */
461 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
462 && DECL_NAMESPACE_SCOPE_P (*tp))
463 if (! DECL_EXTERNAL (*tp) && ! TREE_STATIC (*tp))
464 abort ();
465 #endif
467 /* If this is a RETURN_EXPR, change it into a MODIFY_EXPR and a
468 GOTO_EXPR with the RET_LABEL as its target. */
469 if (TREE_CODE (*tp) == RETURN_EXPR && id->ret_label)
471 tree return_stmt = *tp;
472 tree goto_stmt;
474 /* Build the GOTO_EXPR. */
475 tree assignment = TREE_OPERAND (return_stmt, 0);
476 goto_stmt = build1 (GOTO_EXPR, void_type_node, id->ret_label);
477 TREE_USED (id->ret_label) = 1;
479 /* If we're returning something, just turn that into an
480 assignment into the equivalent of the original
481 RESULT_DECL. */
482 if (assignment)
484 /* Do not create a statement containing a naked RESULT_DECL. */
485 if (TREE_CODE (assignment) == RESULT_DECL)
486 gimplify_stmt (&assignment);
488 *tp = build (BIND_EXPR, void_type_node, NULL, NULL, NULL);
489 append_to_statement_list (assignment, &BIND_EXPR_BODY (*tp));
490 append_to_statement_list (goto_stmt, &BIND_EXPR_BODY (*tp));
492 /* If we're not returning anything just do the jump. */
493 else
494 *tp = goto_stmt;
496 /* Local variables and labels need to be replaced by equivalent
497 variables. We don't want to copy static variables; there's only
498 one of those, no matter how many times we inline the containing
499 function. Similarly for globals from an outer function. */
500 else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
502 tree new_decl;
504 /* Remap the declaration. */
505 new_decl = remap_decl (*tp, id);
506 if (! new_decl)
507 abort ();
508 /* Replace this variable with the copy. */
509 STRIP_TYPE_NOPS (new_decl);
510 *tp = new_decl;
512 #if 0
513 else if (nonstatic_local_decl_p (*tp)
514 && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
515 abort ();
516 #endif
517 else if (TREE_CODE (*tp) == STATEMENT_LIST)
518 copy_statement_list (tp);
519 else if (TREE_CODE (*tp) == SAVE_EXPR)
520 remap_save_expr (tp, id->decl_map, walk_subtrees);
521 else if (TREE_CODE (*tp) == BIND_EXPR)
522 copy_bind_expr (tp, walk_subtrees, id);
523 else if (TREE_CODE (*tp) == LABELED_BLOCK_EXPR)
525 /* We need a new copy of this labeled block; the EXIT_BLOCK_EXPR
526 will refer to it, so save a copy ready for remapping. We
527 save it in the decl_map, although it isn't a decl. */
528 tree new_block = copy_node (*tp);
529 insert_decl_map (id, *tp, new_block);
530 *tp = new_block;
532 else if (TREE_CODE (*tp) == EXIT_BLOCK_EXPR)
534 splay_tree_node n
535 = splay_tree_lookup (id->decl_map,
536 (splay_tree_key) TREE_OPERAND (*tp, 0));
537 /* We _must_ have seen the enclosing LABELED_BLOCK_EXPR. */
538 if (! n)
539 abort ();
540 *tp = copy_node (*tp);
541 TREE_OPERAND (*tp, 0) = (tree) n->value;
543 /* Types may need remapping as well. */
544 else if (TYPE_P (*tp))
545 *tp = remap_type (*tp, id);
547 /* Otherwise, just copy the node. Note that copy_tree_r already
548 knows not to copy VAR_DECLs, etc., so this is safe. */
549 else
551 tree old_node = *tp;
553 if (TREE_CODE (*tp) == MODIFY_EXPR
554 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
555 && (lang_hooks.tree_inlining.auto_var_in_fn_p
556 (TREE_OPERAND (*tp, 0), fn)))
558 /* Some assignments VAR = VAR; don't generate any rtl code
559 and thus don't count as variable modification. Avoid
560 keeping bogosities like 0 = 0. */
561 tree decl = TREE_OPERAND (*tp, 0), value;
562 splay_tree_node n;
564 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
565 if (n)
567 value = (tree) n->value;
568 STRIP_TYPE_NOPS (value);
569 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
571 *tp = value;
572 return copy_body_r (tp, walk_subtrees, data);
576 else if (TREE_CODE (*tp) == ADDR_EXPR
577 && (lang_hooks.tree_inlining.auto_var_in_fn_p
578 (TREE_OPERAND (*tp, 0), fn)))
580 /* Get rid of &* from inline substitutions. It can occur when
581 someone takes the address of a parm or return slot passed by
582 invisible reference. */
583 tree decl = TREE_OPERAND (*tp, 0), value;
584 splay_tree_node n;
586 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
587 if (n)
589 value = (tree) n->value;
590 if (TREE_CODE (value) == INDIRECT_REF)
592 if (!lang_hooks.types_compatible_p
593 (TREE_TYPE (*tp), TREE_TYPE (TREE_OPERAND (value, 0))))
594 *tp = fold_convert (TREE_TYPE (*tp),
595 TREE_OPERAND (value, 0));
596 else
597 *tp = TREE_OPERAND (value, 0);
599 return copy_body_r (tp, walk_subtrees, data);
603 else if (TREE_CODE (*tp) == INDIRECT_REF)
605 /* Get rid of *& from inline substitutions that can happen when a
606 pointer argument is an ADDR_EXPR. */
607 tree decl = TREE_OPERAND (*tp, 0), value;
608 splay_tree_node n;
610 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
611 if (n)
613 value = (tree) n->value;
614 STRIP_NOPS (value);
615 if (TREE_CODE (value) == ADDR_EXPR
616 && (lang_hooks.types_compatible_p
617 (TREE_TYPE (*tp), TREE_TYPE (TREE_OPERAND (value, 0)))))
619 *tp = TREE_OPERAND (value, 0);
620 return copy_body_r (tp, walk_subtrees, data);
625 copy_tree_r (tp, walk_subtrees, NULL);
627 if (TREE_CODE (*tp) == CALL_EXPR && id->node && get_callee_fndecl (*tp))
629 if (id->saving_p)
631 struct cgraph_node *node;
632 struct cgraph_edge *edge;
634 for (node = id->node->next_clone; node; node = node->next_clone)
636 edge = cgraph_edge (node, old_node);
637 if (edge)
638 edge->call_expr = *tp;
639 else
640 abort ();
643 else
645 struct cgraph_edge *edge
646 = cgraph_edge (id->current_node, old_node);
648 if (edge)
649 cgraph_clone_edge (edge, id->node, *tp);
653 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
655 /* The copied TARGET_EXPR has never been expanded, even if the
656 original node was expanded already. */
657 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
659 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
660 TREE_OPERAND (*tp, 3) = NULL_TREE;
664 /* Keep iterating. */
665 return NULL_TREE;
668 /* Make a copy of the body of FN so that it can be inserted inline in
669 another function. */
671 static tree
672 copy_body (inline_data *id)
674 tree body;
675 tree fndecl = VARRAY_TOP_TREE (id->fns);
677 if (fndecl == current_function_decl
678 && cfun->saved_tree)
679 body = cfun->saved_tree;
680 else
681 body = DECL_SAVED_TREE (fndecl);
682 walk_tree (&body, copy_body_r, id, NULL);
684 return body;
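/* Set up the mapping for parameter P of the function being inlined,
   which is being given the argument VALUE.  When P is read-only, not
   addressable, and VALUE is a gimple invariant of a compatible type,
   P is simply mapped to VALUE.  Otherwise a new VAR_DECL is created,
   chained onto *VARS, mapped in place of P, and an initialization
   "VAR = VALUE" is appended to *INIT_STMTS; *GIMPLIFY_INIT_STMTS_P is
   set if that initializer will need gimplification.  FN is the
   function being inlined.  */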
687 static void
688 setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
689 tree *init_stmts, tree *vars, bool *gimplify_init_stmts_p)
691 tree init_stmt;
692 tree var;
694 /* If the parameter is never assigned to, we may not need to
695 create a new variable here at all. Instead, we may be able
696 to just use the argument value. */
697 if (TREE_READONLY (p)
698 && !TREE_ADDRESSABLE (p)
699 && value && !TREE_SIDE_EFFECTS (value))
701 /* We can't risk substituting complex expressions. They
702 might contain variables that will be assigned to later.
703 Theoretically, we could check the expression to see if
704 all of the variables that determine its value are
705 read-only, but we don't bother. */
706 /* We may produce non-gimple trees by adding NOPs or introduce
707 invalid sharing when the operand is not really constant.  It is
708 not a big deal to prohibit constant propagation here, as we will
709 constant propagate in the DOM1 pass anyway.  */
710 if (is_gimple_min_invariant (value)
711 && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p)))
713 insert_decl_map (id, p, value);
714 return;
718 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
719 here since the type of this decl must be visible to the calling
720 function. */
721 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
723 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
724 that way, when the PARM_DECL is encountered, it will be
725 automatically replaced by the VAR_DECL. */
726 insert_decl_map (id, p, var);
728 /* Declare this new variable. */
729 TREE_CHAIN (var) = *vars;
730 *vars = var;
732 /* Make gimplifier happy about this variable. */
733 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
735 /* Even if P was TREE_READONLY, the new VAR should not be.
736 In the original code, we would have constructed a
737 temporary, and then the function body would have never
738 changed the value of P. However, now, we will be
739 constructing VAR directly. The constructor body may
740 change its value multiple times as it is being
741 constructed. Therefore, it must not be TREE_READONLY;
742 the back-end assumes that a TREE_READONLY variable is
743 assigned to only once. */
744 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
745 TREE_READONLY (var) = 0;
747 /* Initialize this VAR_DECL from the equivalent argument. Convert
748 the argument to the proper type in case it was promoted. */
749 if (value)
751 tree rhs = fold_convert (TREE_TYPE (var), value);
753 if (rhs == error_mark_node)
754 return;
756 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
757 keep our trees in gimple form. */
758 init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
759 append_to_statement_list (init_stmt, init_stmts);
761 /* If we did not create a gimple value and we did not create a gimple
762 cast of a gimple value, then we will need to gimplify INIT_STMTS
763 at the end. Note that is_gimple_cast only checks the outer
764 tree code, not its operand.  Thus the explicit check that its
765 operand is a gimple value. */
766 if (!is_gimple_val (rhs)
767 && (!is_gimple_cast (rhs)
768 || !is_gimple_val (TREE_OPERAND (rhs, 0))))
769 *gimplify_init_stmts_p = true;
773 /* Generate code to initialize the parameters of the function at the
774 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
776 static tree
777 initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
778 tree fn, tree bind_expr)
780 tree init_stmts = NULL_TREE;
781 tree parms;
782 tree a;
783 tree p;
784 tree vars = NULL_TREE;
785 bool gimplify_init_stmts_p = false;
786 int argnum = 0;
788 /* Figure out what the parameters are. */
789 parms = DECL_ARGUMENTS (fn);
790 if (fn == current_function_decl)
791 parms = cfun->saved_args;
793 /* Loop through the parameter declarations, replacing each with an
794 equivalent VAR_DECL, appropriately initialized. */
795 for (p = parms, a = args; p;
796 a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
798 tree value;
800 ++argnum;
802 /* Find the initializer. */
803 value = lang_hooks.tree_inlining.convert_parm_for_inlining
804 (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);
806 setup_one_parameter (id, p, value, fn, &init_stmts, &vars,
807 &gimplify_init_stmts_p);
810 /* Evaluate trailing arguments. */
811 for (; a; a = TREE_CHAIN (a))
813 tree value = TREE_VALUE (a);
814 append_to_statement_list (value, &init_stmts);
817 /* Initialize the static chain. */
818 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
819 if (p)
821 /* No static chain? Seems like a bug in tree-nested.c. */
822 if (!static_chain)
823 abort ();
825 setup_one_parameter (id, p, static_chain, fn, &init_stmts, &vars,
826 &gimplify_init_stmts_p);
829 if (gimplify_init_stmts_p)
830 gimplify_body (&init_stmts, current_function_decl);
832 declare_inline_vars (bind_expr, vars);
833 return init_stmts;
836 /* Declare a return variable to replace the RESULT_DECL for the function we
837 are calling. RETURN_SLOT_ADDR, if non-null, was a fake parameter that
838 took the address of the result. MODIFY_DEST, if non-null, was the LHS of
839 the MODIFY_EXPR to which this call is the RHS.
841 The return value is a (possibly null) value that is the result of the
842 function as seen by the callee. *USE_P is a (possibly null) value that
843 holds the result as seen by the caller. */
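/* For instance (illustrative only): given "d = f ();" where f returns
   a scalar, MODIFY_DEST is "d"; if "d" is neither TREE_STATIC nor
   TREE_ADDRESSABLE and no promotion is involved, "d" itself is reused
   as the return variable.  For a callee whose aggregate result is
   passed through a hidden return slot, RETURN_SLOT_ADDR is that hidden
   argument and the return variable is "*RETURN_SLOT_ADDR".  Otherwise
   a fresh temporary is created and handed back through *USE_P.  */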
845 static tree
846 declare_return_variable (inline_data *id, tree return_slot_addr,
847 tree modify_dest, tree *use_p)
849 tree callee = VARRAY_TOP_TREE (id->fns);
850 tree caller = VARRAY_TREE (id->fns, 0);
851 tree result = DECL_RESULT (callee);
852 tree callee_type = TREE_TYPE (result);
853 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
854 tree var, use;
856 /* We don't need to do anything for functions that don't return
857 anything. */
858 if (!result || VOID_TYPE_P (callee_type))
860 *use_p = NULL_TREE;
861 return NULL_TREE;
864 /* If there was a return slot, then the return value is the
865 dereferenced address of that object.  */
866 if (return_slot_addr)
868 /* The front end shouldn't have used both return_slot_addr and
869 a modify expression. */
870 if (modify_dest)
871 abort ();
872 var = build_fold_indirect_ref (return_slot_addr);
873 use = NULL;
874 goto done;
877 /* All types requiring non-trivial constructors should have been handled. */
878 if (TREE_ADDRESSABLE (callee_type))
879 abort ();
881 /* Attempt to avoid creating a new temporary variable. */
882 if (modify_dest)
884 bool use_it = false;
886 /* We can't use MODIFY_DEST if there's type promotion involved. */
887 if (!lang_hooks.types_compatible_p (caller_type, callee_type))
888 use_it = false;
890 /* ??? If we're assigning to a variable sized type, then we must
891 reuse the destination variable, because we've no good way to
892 create variable sized temporaries at this point. */
893 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
894 use_it = true;
896 /* If the callee cannot possibly modify MODIFY_DEST, then we can
897 reuse it as the result of the call directly. Don't do this if
898 it would promote MODIFY_DEST to addressable. */
899 else if (!TREE_STATIC (modify_dest)
900 && !TREE_ADDRESSABLE (modify_dest)
901 && !TREE_ADDRESSABLE (result))
902 use_it = true;
904 if (use_it)
906 var = modify_dest;
907 use = NULL;
908 goto done;
912 if (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) != INTEGER_CST)
913 abort ();
915 var = copy_decl_for_inlining (result, callee, caller);
916 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
917 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
918 = tree_cons (NULL_TREE, var,
919 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
921 /* Do not have the rest of GCC warn about this variable as it should
922 not be visible to the user. */
923 TREE_NO_WARNING (var) = 1;
925 /* Build the use expr. If the return type of the function was
926 promoted, convert it back to the expected type. */
927 use = var;
928 if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
929 use = fold_convert (caller_type, var);
931 done:
932 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
933 way, when the RESULT_DECL is encountered, it will be
934 automatically replaced by the VAR_DECL. */
935 insert_decl_map (id, result, var);
937 /* Remember this so we can ignore it in remap_decls. */
938 id->retvar = var;
940 *use_p = use;
941 return var;
944 /* Returns nonzero if a function can be inlined as a tree. */
946 bool
947 tree_inlinable_function_p (tree fn)
949 return inlinable_function_p (fn);
952 static const char *inline_forbidden_reason;
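/* walk_tree callback used by inline_forbidden_p.  FNP is the
   FUNCTION_DECL being considered for inlining.  If the node at *NODEP
   blocks inlining (for example a call to alloca outside an
   always_inline function, a call to setjmp, use of variable argument
   lists, a nested function, a computed goto, a non-local label, or a
   variably sized type), set inline_forbidden_reason and return the
   offending node; otherwise return NULL_TREE.  */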
954 static tree
955 inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
956 void *fnp)
958 tree node = *nodep;
959 tree fn = (tree) fnp;
960 tree t;
962 switch (TREE_CODE (node))
964 case CALL_EXPR:
965 /* Refuse to inline an alloca call unless the user explicitly forced it,
966 as inlining may drastically change the program's memory overhead when
967 the function using alloca is called in a loop.  In the GCC present in
968 SPEC2000, inlining into schedule_block caused it to require 2GB of
969 RAM instead of 256MB.  */
970 if (alloca_call_p (node)
971 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
973 inline_forbidden_reason
974 = N_("%Jfunction '%F' can never be inlined because it uses "
975 "alloca (override using the always_inline attribute)");
976 return node;
978 t = get_callee_fndecl (node);
979 if (! t)
980 break;
982 /* We cannot inline functions that call setjmp. */
983 if (setjmp_call_p (t))
985 inline_forbidden_reason
986 = N_("%Jfunction '%F' can never be inlined because it uses setjmp");
987 return node;
990 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
991 switch (DECL_FUNCTION_CODE (t))
993 /* We cannot inline functions that take a variable number of
994 arguments. */
995 case BUILT_IN_VA_START:
996 case BUILT_IN_STDARG_START:
997 case BUILT_IN_NEXT_ARG:
998 case BUILT_IN_VA_END:
999 inline_forbidden_reason
1000 = N_("%Jfunction '%F' can never be inlined because it "
1001 "uses variable argument lists");
1002 return node;
1004 case BUILT_IN_LONGJMP:
1005 /* We can't inline functions that call __builtin_longjmp at
1006 all. The non-local goto machinery really requires the
1007 destination be in a different function. If we allow the
1008 function calling __builtin_longjmp to be inlined into the
1009 function calling __builtin_setjmp, Things will Go Awry. */
1010 inline_forbidden_reason
1011 = N_("%Jfunction '%F' can never be inlined because "
1012 "it uses setjmp-longjmp exception handling");
1013 return node;
1015 case BUILT_IN_NONLOCAL_GOTO:
1016 /* Similarly. */
1017 inline_forbidden_reason
1018 = N_("%Jfunction '%F' can never be inlined because "
1019 "it uses non-local goto");
1020 return node;
1022 default:
1023 break;
1025 break;
1027 case BIND_EXPR:
1028 for (t = BIND_EXPR_VARS (node); t ; t = TREE_CHAIN (t))
1030 /* We cannot inline functions that contain other functions. */
1031 if (TREE_CODE (t) == FUNCTION_DECL && DECL_INITIAL (t))
1033 inline_forbidden_reason
1034 = N_("%Jfunction '%F' can never be inlined "
1035 "because it contains a nested function");
1036 return node;
1039 break;
1041 case GOTO_EXPR:
1042 t = TREE_OPERAND (node, 0);
1044 /* We will not inline a function which uses computed goto. The
1045 addresses of its local labels, which may be tucked into
1046 global storage, are of course not constant across
1047 instantiations, which causes unexpected behavior. */
1048 if (TREE_CODE (t) != LABEL_DECL)
1050 inline_forbidden_reason
1051 = N_("%Jfunction '%F' can never be inlined "
1052 "because it contains a computed goto");
1053 return node;
1055 break;
1057 case LABEL_EXPR:
1058 t = TREE_OPERAND (node, 0);
1059 if (DECL_NONLOCAL (t))
1061 /* We cannot inline a function that receives a non-local goto
1062 because we cannot remap the destination label used in the
1063 function that is performing the non-local goto. */
1064 inline_forbidden_reason
1065 = N_("%Jfunction '%F' can never be inlined "
1066 "because it receives a non-local goto");
1067 return node;
1069 break;
1071 case RECORD_TYPE:
1072 case UNION_TYPE:
1073 /* We cannot inline a function of the form
1075 void F (int i) { struct S { int ar[i]; } s; }
1077 Attempting to do so produces a catch-22.
1078 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1079 UNION_TYPE nodes, then it goes into infinite recursion on a
1080 structure containing a pointer to its own type. If it doesn't,
1081 then the type node for S doesn't get adjusted properly when
1082 F is inlined, and we abort in find_function_data. */
1083 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1084 if (variably_modified_type_p (TREE_TYPE (t), NULL))
1086 inline_forbidden_reason
1087 = N_("%Jfunction '%F' can never be inlined "
1088 "because it uses variable sized variables");
1089 return node;
1092 default:
1093 break;
1096 return NULL_TREE;
1099 /* Return the subexpression of FNDECL's body that forbids inlining it, if any. */
1100 static tree
1101 inline_forbidden_p (tree fndecl)
1103 location_t saved_loc = input_location;
1104 tree ret = walk_tree_without_duplicates (&DECL_SAVED_TREE (fndecl),
1105 inline_forbidden_p_1, fndecl);
1107 input_location = saved_loc;
1108 return ret;
1111 /* Returns nonzero if FN is a function that does not have any
1112 fundamental inline blocking properties. */
1114 static bool
1115 inlinable_function_p (tree fn)
1117 bool inlinable = true;
1119 /* If we've already decided this function shouldn't be inlined,
1120 there's no need to check again. */
1121 if (DECL_UNINLINABLE (fn))
1122 return false;
1124 /* See if there is any language-specific reason it cannot be
1125 inlined. (It is important that this hook be called early because
1126 in C++ it may result in template instantiation.)
1127 If the function is not inlinable for language-specific reasons,
1128 it is left up to the langhook to explain why. */
1129 inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);
1131 /* If we don't have the function body available, we can't inline it.
1132 However, this should not be recorded since we also get here for
1133 forward declared inline functions. Therefore, return at once. */
1134 if (!DECL_SAVED_TREE (fn))
1135 return false;
1137 /* If we're not inlining at all, then we cannot inline this function. */
1138 else if (!flag_inline_trees)
1139 inlinable = false;
1141 /* Only try to inline functions if DECL_INLINE is set. This should be
1142 true for all functions declared `inline', and for all other functions
1143 as well with -finline-functions.
1145 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
1146 it's the front-end that must set DECL_INLINE in this case, because
1147 dwarf2out loses if a function that does not have DECL_INLINE set is
1148 inlined anyway. That is why we have both DECL_INLINE and
1149 DECL_DECLARED_INLINE_P. */
1150 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
1151 here should be redundant. */
1152 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
1153 inlinable = false;
1155 else if (inline_forbidden_p (fn))
1157 /* See if we should warn about uninlinable functions. Previously,
1158 some of these warnings would be issued while trying to expand
1159 the function inline, but that would cause multiple warnings
1160 about functions that would for example call alloca.  But since
1161 this is a property of the function, just one warning is enough.
1162 As a bonus we can now give more details about the reason why a
1163 function is not inlinable.
1164 We only warn for functions declared `inline' by the user. */
1165 bool do_warning = (warn_inline
1166 && DECL_INLINE (fn)
1167 && DECL_DECLARED_INLINE_P (fn)
1168 && !DECL_IN_SYSTEM_HEADER (fn));
1170 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1171 sorry (inline_forbidden_reason, fn, fn);
1172 else if (do_warning)
1173 warning (inline_forbidden_reason, fn, fn);
1175 inlinable = false;
1178 /* Squirrel away the result so that we don't have to check again. */
1179 DECL_UNINLINABLE (fn) = !inlinable;
1181 return inlinable;
1184 /* Used by estimate_num_insns.  Estimate the number of instructions
1185 contributed by the node at *TP, accumulating the count in DATA.  */
1187 static tree
1188 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
1190 int *count = data;
1191 tree x = *tp;
1193 if (TYPE_P (x) || DECL_P (x))
1195 *walk_subtrees = 0;
1196 return NULL;
1198 /* Assume that constants and references count as nothing.  They should
1199 be dominated by the cost of the operations among them, which we count
1200 later, and they are a common target of CSE and similar optimizations. */
1201 else if (TREE_CODE_CLASS (TREE_CODE (x)) == 'c'
1202 || TREE_CODE_CLASS (TREE_CODE (x)) == 'r')
1203 return NULL;
1205 switch (TREE_CODE (x))
1207 /* Containers have no cost. */
1208 case TREE_LIST:
1209 case TREE_VEC:
1210 case BLOCK:
1211 case COMPONENT_REF:
1212 case BIT_FIELD_REF:
1213 case INDIRECT_REF:
1214 case ARRAY_REF:
1215 case ARRAY_RANGE_REF:
1216 case OBJ_TYPE_REF:
1217 case EXC_PTR_EXPR: /* ??? */
1218 case FILTER_EXPR: /* ??? */
1219 case COMPOUND_EXPR:
1220 case BIND_EXPR:
1221 case LABELED_BLOCK_EXPR:
1222 case WITH_CLEANUP_EXPR:
1223 case NOP_EXPR:
1224 case VIEW_CONVERT_EXPR:
1225 case SAVE_EXPR:
1226 case ADDR_EXPR:
1227 case COMPLEX_EXPR:
1228 case EXIT_BLOCK_EXPR:
1229 case CASE_LABEL_EXPR:
1230 case SSA_NAME:
1231 case CATCH_EXPR:
1232 case EH_FILTER_EXPR:
1233 case STATEMENT_LIST:
1234 case ERROR_MARK:
1235 case NON_LVALUE_EXPR:
1236 case ENTRY_VALUE_EXPR:
1237 case FDESC_EXPR:
1238 case VA_ARG_EXPR:
1239 case TRY_CATCH_EXPR:
1240 case TRY_FINALLY_EXPR:
1241 case LABEL_EXPR:
1242 case GOTO_EXPR:
1243 case RETURN_EXPR:
1244 case EXIT_EXPR:
1245 case LOOP_EXPR:
1246 case PHI_NODE:
1247 case WITH_SIZE_EXPR:
1248 break;
1250 /* We don't account for constants for now.  Assume that their cost is
1251 amortized by the operations that use them.  We may reconsider this
1252 decision once we are able to optimize the tree before estimating its
1253 size and break out static initializers.  */
1254 case IDENTIFIER_NODE:
1255 case INTEGER_CST:
1256 case REAL_CST:
1257 case COMPLEX_CST:
1258 case VECTOR_CST:
1259 case STRING_CST:
1260 *walk_subtrees = 0;
1261 return NULL;
1263 /* Recognize assignments of large structures and constructors of
1264 big arrays. */
1265 case INIT_EXPR:
1266 case MODIFY_EXPR:
1267 x = TREE_OPERAND (x, 0);
1268 /* FALLTHRU */
1269 case TARGET_EXPR:
1270 case CONSTRUCTOR:
1272 HOST_WIDE_INT size;
1274 size = int_size_in_bytes (TREE_TYPE (x));
1276 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
1277 *count += 10;
1278 else
1279 *count += ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
1281 break;
1283 /* Assign cost of 1 to usual operations.
1284 ??? We may consider mapping RTL costs to this. */
1285 case COND_EXPR:
1287 case PLUS_EXPR:
1288 case MINUS_EXPR:
1289 case MULT_EXPR:
1291 case FIX_TRUNC_EXPR:
1292 case FIX_CEIL_EXPR:
1293 case FIX_FLOOR_EXPR:
1294 case FIX_ROUND_EXPR:
1296 case NEGATE_EXPR:
1297 case FLOAT_EXPR:
1298 case MIN_EXPR:
1299 case MAX_EXPR:
1300 case ABS_EXPR:
1302 case LSHIFT_EXPR:
1303 case RSHIFT_EXPR:
1304 case LROTATE_EXPR:
1305 case RROTATE_EXPR:
1307 case BIT_IOR_EXPR:
1308 case BIT_XOR_EXPR:
1309 case BIT_AND_EXPR:
1310 case BIT_NOT_EXPR:
1312 case TRUTH_ANDIF_EXPR:
1313 case TRUTH_ORIF_EXPR:
1314 case TRUTH_AND_EXPR:
1315 case TRUTH_OR_EXPR:
1316 case TRUTH_XOR_EXPR:
1317 case TRUTH_NOT_EXPR:
1319 case LT_EXPR:
1320 case LE_EXPR:
1321 case GT_EXPR:
1322 case GE_EXPR:
1323 case EQ_EXPR:
1324 case NE_EXPR:
1325 case ORDERED_EXPR:
1326 case UNORDERED_EXPR:
1328 case UNLT_EXPR:
1329 case UNLE_EXPR:
1330 case UNGT_EXPR:
1331 case UNGE_EXPR:
1332 case UNEQ_EXPR:
1333 case LTGT_EXPR:
1335 case CONVERT_EXPR:
1337 case CONJ_EXPR:
1339 case PREDECREMENT_EXPR:
1340 case PREINCREMENT_EXPR:
1341 case POSTDECREMENT_EXPR:
1342 case POSTINCREMENT_EXPR:
1344 case SWITCH_EXPR:
1346 case ASM_EXPR:
1348 case RESX_EXPR:
1349 *count += 1;
1350 break;
1352 /* A few special cases of expensive operations.  This is useful
1353 to avoid inlining functions that contain too many of these.  */
1354 case TRUNC_DIV_EXPR:
1355 case CEIL_DIV_EXPR:
1356 case FLOOR_DIV_EXPR:
1357 case ROUND_DIV_EXPR:
1358 case EXACT_DIV_EXPR:
1359 case TRUNC_MOD_EXPR:
1360 case CEIL_MOD_EXPR:
1361 case FLOOR_MOD_EXPR:
1362 case ROUND_MOD_EXPR:
1363 case RDIV_EXPR:
1364 *count += 10;
1365 break;
1366 case CALL_EXPR:
1368 tree decl = get_callee_fndecl (x);
1370 if (decl && DECL_BUILT_IN (decl))
1371 switch (DECL_FUNCTION_CODE (decl))
1373 case BUILT_IN_CONSTANT_P:
1374 *walk_subtrees = 0;
1375 return NULL_TREE;
1376 case BUILT_IN_EXPECT:
1377 return NULL_TREE;
1378 default:
1379 break;
1381 *count += 10;
1382 break;
1384 default:
1385 /* Abort here so we know we don't miss any nodes. */
1386 abort ();
1388 return NULL;
1391 /* Estimate number of instructions that will be created by expanding EXPR. */
1393 int
1394 estimate_num_insns (tree expr)
1396 int num = 0;
1397 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);
1398 return num;
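/* Note: the estimate computed above is intentionally rough; its callers
   (for example the call-graph based inlining heuristics) only compare
   it against inlining size limits, so precise per-node costs are not
   required.  */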
1401 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
1403 static tree
1404 expand_call_inline (tree *tp, int *walk_subtrees, void *data)
1406 inline_data *id;
1407 tree t;
1408 tree expr;
1409 tree stmt;
1410 tree use_retvar;
1411 tree decl;
1412 tree fn;
1413 tree arg_inits;
1414 tree *inlined_body;
1415 splay_tree st;
1416 tree args;
1417 tree return_slot_addr;
1418 tree modify_dest;
1419 location_t saved_location;
1420 struct cgraph_edge *edge;
1421 const char *reason;
1423 /* See what we've got. */
1424 id = (inline_data *) data;
1425 t = *tp;
1427 /* Set input_location here so we get the right instantiation context
1428 if we call instantiate_decl from inlinable_function_p. */
1429 saved_location = input_location;
1430 if (EXPR_HAS_LOCATION (t))
1431 input_location = EXPR_LOCATION (t);
1433 /* Recurse, but letting recursive invocations know that we are
1434 inside the body of a TARGET_EXPR. */
1435 if (TREE_CODE (*tp) == TARGET_EXPR)
1437 #if 0
1438 int i, len = first_rtl_op (TARGET_EXPR);
1440 /* We're walking our own subtrees. */
1441 *walk_subtrees = 0;
1443 /* Actually walk over them. This loop is the body of
1444 walk_trees, omitting the case where the TARGET_EXPR
1445 itself is handled. */
1446 for (i = 0; i < len; ++i)
1448 if (i == 2)
1449 ++id->in_target_cleanup_p;
1450 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
1451 id->tree_pruner);
1452 if (i == 2)
1453 --id->in_target_cleanup_p;
1456 goto egress;
1457 #endif
1460 if (TYPE_P (t))
1461 /* Because types were not copied in copy_body, CALL_EXPRs beneath
1462 them should not be expanded. This can happen if the type is a
1463 dynamic array type, for example. */
1464 *walk_subtrees = 0;
1466 /* From here on, we're only interested in CALL_EXPRs. */
1467 if (TREE_CODE (t) != CALL_EXPR)
1468 goto egress;
1470 /* First, see if we can figure out what function is being called.
1471 If we cannot, then there is no hope of inlining the function. */
1472 fn = get_callee_fndecl (t);
1473 if (!fn)
1474 goto egress;
1476 /* Turn forward declarations into real ones. */
1477 fn = cgraph_node (fn)->decl;
1479 /* If fn is a declaration of a function in a nested scope that was
1480 globally declared inline, we don't set its DECL_INITIAL.
1481 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
1482 C++ front-end uses it for cdtors to refer to their internal
1483 declarations, which are not real functions.  Fortunately those
1484 don't have trees to be saved, so we can tell by checking their
1485 DECL_SAVED_TREE. */
1486 if (! DECL_INITIAL (fn)
1487 && DECL_ABSTRACT_ORIGIN (fn)
1488 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
1489 fn = DECL_ABSTRACT_ORIGIN (fn);
1491 /* Objective C and Fortran still call tree_rest_of_compilation directly.
1492 Kill this check once this is fixed.  */
1493 if (!id->current_node->analyzed)
1494 goto egress;
1496 edge = cgraph_edge (id->current_node, t);
1498 /* Constant propagation on arguments done during previous inlining
1499 may create a new direct call.  Produce an edge for it.  */
1500 if (!edge)
1502 struct cgraph_node *dest = cgraph_node (fn);
1504 /* We have a missing edge in the callgraph.  This can happen when
1505 previous inlining turned an indirect call into a direct call by
1506 constant propagating arguments.  In all other cases we hit a bug
1507 (incorrect node sharing is the most common reason for missing edges). */
1508 if (!dest->needed)
1509 abort ();
1510 cgraph_create_edge (id->node, dest, t)->inline_failed
1511 = N_("originally indirect function call not considered for inlining");
1512 goto egress;
1515 /* Don't try to inline functions that are not well-suited to
1516 inlining. */
1517 if (!cgraph_inline_p (edge, &reason))
1519 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1521 sorry ("%Jinlining failed in call to '%F': %s", fn, fn, reason);
1522 sorry ("called from here");
1524 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
1525 && !DECL_IN_SYSTEM_HEADER (fn)
1526 && strlen (reason))
1528 warning ("%Jinlining failed in call to '%F': %s", fn, fn, reason);
1529 warning ("called from here");
1531 goto egress;
1534 #ifdef ENABLE_CHECKING
1535 if (edge->callee->decl != id->node->decl)
1536 verify_cgraph_node (edge->callee);
1537 #endif
1539 if (! lang_hooks.tree_inlining.start_inlining (fn))
1540 goto egress;
1542 /* Build a block containing code to initialize the arguments, the
1543 actual inline expansion of the body, and a label for the return
1544 statements within the function to jump to. The type of the
1545 statement expression is the return type of the function call. */
1546 stmt = NULL;
1547 expr = build (BIND_EXPR, void_type_node, NULL_TREE,
1548 stmt, make_node (BLOCK));
1549 BLOCK_ABSTRACT_ORIGIN (BIND_EXPR_BLOCK (expr)) = fn;
1551 /* Local declarations will be replaced by their equivalents in this
1552 map. */
1553 st = id->decl_map;
1554 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
1555 NULL, NULL);
1557 /* Initialize the parameters. */
1558 args = TREE_OPERAND (t, 1);
1559 return_slot_addr = NULL_TREE;
1560 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
1562 return_slot_addr = TREE_VALUE (args);
1563 args = TREE_CHAIN (args);
1564 TREE_TYPE (expr) = void_type_node;
1567 arg_inits = initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2),
1568 fn, expr);
1569 if (arg_inits)
1571 /* Expand any inlined calls in the initializers. Do this before we
1572 push FN on the stack of functions we are inlining; we want to
1573 inline calls to FN that appear in the initializers for the
1574 parameters.
1576 Note we need to save and restore the saved tree statement iterator
1577 to avoid having it clobbered by expand_calls_inline. */
1578 tree_stmt_iterator save_tsi;
1580 save_tsi = id->tsi;
1581 expand_calls_inline (&arg_inits, id);
1582 id->tsi = save_tsi;
1584 /* And add them to the tree. */
1585 append_to_statement_list (arg_inits, &BIND_EXPR_BODY (expr));
1588 /* Record the function we are about to inline so that we can avoid
1589 recursing into it. */
1590 VARRAY_PUSH_TREE (id->fns, fn);
1592 /* Record the function we are about to inline if optimize_function
1593 has not been called on it yet and we don't have it in the list. */
1594 if (! DECL_INLINED_FNS (fn))
1596 int i;
1598 for (i = VARRAY_ACTIVE_SIZE (id->inlined_fns) - 1; i >= 0; i--)
1599 if (VARRAY_TREE (id->inlined_fns, i) == fn)
1600 break;
1601 if (i < 0)
1602 VARRAY_PUSH_TREE (id->inlined_fns, fn);
1605 /* Return statements in the function body will be replaced by jumps
1606 to the RET_LABEL. */
1607 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
1608 DECL_ARTIFICIAL (id->ret_label) = 1;
1609 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
1610 insert_decl_map (id, id->ret_label, id->ret_label);
1612 if (! DECL_INITIAL (fn)
1613 || TREE_CODE (DECL_INITIAL (fn)) != BLOCK)
1614 abort ();
1616 /* Find the lhs to which the result of this call is assigned. */
1617 modify_dest = tsi_stmt (id->tsi);
1618 if (TREE_CODE (modify_dest) == MODIFY_EXPR)
1619 modify_dest = TREE_OPERAND (modify_dest, 0);
1620 else
1621 modify_dest = NULL;
1623 /* Declare the return variable for the function. */
1624 decl = declare_return_variable (id, return_slot_addr,
1625 modify_dest, &use_retvar);
1627 /* After we've initialized the parameters, we insert the body of the
1628 function itself. */
1630 struct cgraph_node *old_node = id->current_node;
1632 id->current_node = edge->callee;
1633 append_to_statement_list (copy_body (id), &BIND_EXPR_BODY (expr));
1634 id->current_node = old_node;
1636 inlined_body = &BIND_EXPR_BODY (expr);
1638 /* After the body of the function comes the RET_LABEL. This must come
1639 before we evaluate the returned value below, because that evaluation
1640 may cause RTL to be generated. */
1641 if (TREE_USED (id->ret_label))
1643 tree label = build1 (LABEL_EXPR, void_type_node, id->ret_label);
1644 append_to_statement_list (label, &BIND_EXPR_BODY (expr));
1647 /* Clean up. */
1648 splay_tree_delete (id->decl_map);
1649 id->decl_map = st;
1651 /* The new expression has side-effects if the old one did. */
1652 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);
1654 tsi_link_before (&id->tsi, expr, TSI_SAME_STMT);
1656 /* If the inlined function returns a result that we care about,
1657 then we're going to need to splice in a MODIFY_EXPR. Otherwise
1658 the call was a standalone statement and we can just replace it
1659 with the BIND_EXPR inline representation of the called function. */
1660 if (!use_retvar || !modify_dest)
1661 *tsi_stmt_ptr (id->tsi) = build_empty_stmt ();
1662 else
1663 *tp = use_retvar;
1665 /* When we gimplify a function call, we may clear TREE_SIDE_EFFECTS on
1666 the call if it is to a "const" function. Thus the copy of
1667 TREE_SIDE_EFFECTS from the CALL_EXPR to the BIND_EXPR above will
1668 result in TREE_SIDE_EFFECTS not being set for the inlined copy of a
1669 "const" function.
1671 Unfortunately, that is wrong as inlining the function can create/expose
1672 interesting side effects (such as setting of a return value).
1674 The easiest solution is to simply recalculate TREE_SIDE_EFFECTS for
1675 the toplevel expression. */
1676 recalculate_side_effects (expr);
1678 /* Update callgraph if needed. */
1679 cgraph_remove_node (edge->callee);
1681 /* Recurse into the body of the just inlined function. */
1682 expand_calls_inline (inlined_body, id);
1683 VARRAY_POP (id->fns);
1685 /* Don't walk into subtrees. We've already handled them above. */
1686 *walk_subtrees = 0;
1688 lang_hooks.tree_inlining.end_inlining (fn);
1690 /* Keep iterating. */
1691 egress:
1692 input_location = saved_location;
1693 return NULL_TREE;
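/* Walk the statement tree rooted at *STMT_P and expand every CALL_EXPR
   found in a position we can handle (a standalone call, the right-hand
   side of a MODIFY_EXPR, or the value of a RETURN_EXPR) by calling
   expand_call_inline.  ID carries the inlining state.  */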
1696 static void
1697 expand_calls_inline (tree *stmt_p, inline_data *id)
1699 tree stmt = *stmt_p;
1700 enum tree_code code = TREE_CODE (stmt);
1701 int dummy;
1703 switch (code)
1705 case STATEMENT_LIST:
1707 tree_stmt_iterator i;
1708 tree new;
1710 for (i = tsi_start (stmt); !tsi_end_p (i); )
1712 id->tsi = i;
1713 expand_calls_inline (tsi_stmt_ptr (i), id);
1715 new = tsi_stmt (i);
1716 if (TREE_CODE (new) == STATEMENT_LIST)
1718 tsi_link_before (&i, new, TSI_SAME_STMT);
1719 tsi_delink (&i);
1721 else
1722 tsi_next (&i);
1725 break;
1727 case COND_EXPR:
1728 expand_calls_inline (&COND_EXPR_THEN (stmt), id);
1729 expand_calls_inline (&COND_EXPR_ELSE (stmt), id);
1730 break;
1732 case CATCH_EXPR:
1733 expand_calls_inline (&CATCH_BODY (stmt), id);
1734 break;
1736 case EH_FILTER_EXPR:
1737 expand_calls_inline (&EH_FILTER_FAILURE (stmt), id);
1738 break;
1740 case TRY_CATCH_EXPR:
1741 case TRY_FINALLY_EXPR:
1742 expand_calls_inline (&TREE_OPERAND (stmt, 0), id);
1743 expand_calls_inline (&TREE_OPERAND (stmt, 1), id);
1744 break;
1746 case BIND_EXPR:
1747 expand_calls_inline (&BIND_EXPR_BODY (stmt), id);
1748 break;
1750 case COMPOUND_EXPR:
1751 /* We're gimple. We should have gotten rid of all these. */
1752 abort ();
1754 case RETURN_EXPR:
1755 stmt_p = &TREE_OPERAND (stmt, 0);
1756 stmt = *stmt_p;
1757 if (!stmt || TREE_CODE (stmt) != MODIFY_EXPR)
1758 break;
1760 /* FALLTHRU */
1762 case MODIFY_EXPR:
1763 stmt_p = &TREE_OPERAND (stmt, 1);
1764 stmt = *stmt_p;
1765 if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
1767 stmt_p = &TREE_OPERAND (stmt, 0);
1768 stmt = *stmt_p;
1770 if (TREE_CODE (stmt) != CALL_EXPR)
1771 break;
1773 /* FALLTHRU */
1775 case CALL_EXPR:
1776 expand_call_inline (stmt_p, &dummy, id);
1777 break;
1779 default:
1780 break;
1784 /* Expand calls to inline functions in the body of FN. */
1786 void
1787 optimize_inline_calls (tree fn)
1789 inline_data id;
1790 tree prev_fn;
1791 tree ifn;
1793 /* There is no point in performing inlining if errors have already
1794 occurred -- and we might crash if we try to inline invalid
1795 code. */
1796 if (errorcount || sorrycount)
1797 return;
1799 /* Clear out ID. */
1800 memset (&id, 0, sizeof (id));
1802 id.current_node = id.node = cgraph_node (fn);
1803 /* Don't allow recursion into FN. */
1804 VARRAY_TREE_INIT (id.fns, 32, "fns");
1805 VARRAY_PUSH_TREE (id.fns, fn);
1806 /* Or any functions that aren't finished yet. */
1807 prev_fn = NULL_TREE;
1808 if (current_function_decl)
1810 VARRAY_PUSH_TREE (id.fns, current_function_decl);
1811 prev_fn = current_function_decl;
1814 prev_fn = lang_hooks.tree_inlining.add_pending_fn_decls (&id.fns, prev_fn);
1816 /* Create the list of functions this call will inline. */
1817 VARRAY_TREE_INIT (id.inlined_fns, 32, "inlined_fns");
1819 /* Keep track of the low-water mark, i.e., the point where the first
1820 real inlining is represented in ID.FNS. */
1821 id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);
1823 /* Replace all calls to inline functions with the bodies of those
1824 functions. */
1825 id.tree_pruner = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
1826 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
1828 /* Clean up. */
1829 htab_delete (id.tree_pruner);
1830 ifn = make_tree_vec (VARRAY_ACTIVE_SIZE (id.inlined_fns));
1831 if (VARRAY_ACTIVE_SIZE (id.inlined_fns))
1832 memcpy (&TREE_VEC_ELT (ifn, 0), &VARRAY_TREE (id.inlined_fns, 0),
1833 VARRAY_ACTIVE_SIZE (id.inlined_fns) * sizeof (tree));
1834 DECL_INLINED_FNS (fn) = ifn;
1836 #ifdef ENABLE_CHECKING
1838 struct cgraph_edge *e;
1840 verify_cgraph_node (id.node);
1842 /* Double check that we inlined everything we are supposed to inline. */
1843 for (e = id.node->callees; e; e = e->next_callee)
1844 if (!e->inline_failed)
1845 abort ();
1847 #endif
1850 /* FN is a function that has a complete body, and CLONE is a function whose
1851 body is to be set to a copy of FN, mapping argument declarations according
1852 to the ARG_MAP splay_tree. */
1854 void
1855 clone_body (tree clone, tree fn, void *arg_map)
1857 inline_data id;
1859 /* Clone the body, as if we were making an inline call. But, remap the
1860 parameters in the callee to the parameters of caller. If there's an
1861 in-charge parameter, map it to an appropriate constant. */
1862 memset (&id, 0, sizeof (id));
1863 VARRAY_TREE_INIT (id.fns, 2, "fns");
1864 VARRAY_PUSH_TREE (id.fns, clone);
1865 VARRAY_PUSH_TREE (id.fns, fn);
1866 id.decl_map = (splay_tree)arg_map;
1868 /* Cloning is treated slightly differently from inlining. Set
1869 CLONING_P so that it's clear which operation we're performing. */
1870 id.cloning_p = true;
1872 /* Actually copy the body. */
1873 append_to_statement_list_force (copy_body (&id), &DECL_SAVED_TREE (clone));
1876 /* Make and return a duplicate of the body of FN.  Put copies of
1877 DECL_ARGUMENTS in *ARG_COPY and of the static chain, if any, in *SC_COPY. */
1879 tree
1880 save_body (tree fn, tree *arg_copy, tree *sc_copy)
1882 inline_data id;
1883 tree body, *parg;
1885 memset (&id, 0, sizeof (id));
1886 VARRAY_TREE_INIT (id.fns, 1, "fns");
1887 VARRAY_PUSH_TREE (id.fns, fn);
1888 id.node = cgraph_node (fn);
1889 id.saving_p = true;
1890 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
1891 *arg_copy = DECL_ARGUMENTS (fn);
1893 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
1895 tree new = copy_node (*parg);
1897 lang_hooks.dup_lang_specific_decl (new);
1898 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
1899 insert_decl_map (&id, *parg, new);
1900 TREE_CHAIN (new) = TREE_CHAIN (*parg);
1901 *parg = new;
1904 *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
1905 if (*sc_copy)
1907 tree new = copy_node (*sc_copy);
1909 lang_hooks.dup_lang_specific_decl (new);
1910 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
1911 insert_decl_map (&id, *sc_copy, new);
1912 TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
1913 *sc_copy = new;
1916 insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));
1918 /* Actually copy the body. */
1919 body = copy_body (&id);
1921 /* Clean up. */
1922 splay_tree_delete (id.decl_map);
1923 return body;
1926 #define WALK_SUBTREE(NODE) \
1927 do \
1929 result = walk_tree (&(NODE), func, data, htab); \
1930 if (result) \
1931 return result; \
1933 while (0)
1935 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
1936    to be walked whenever a type is seen in the tree.  The rest of the operands
1937    and the return value are as for walk_tree. */
1939 static tree
1940 walk_type_fields (tree type, walk_tree_fn func, void *data, void *htab)
1942 tree result = NULL_TREE;
1944 switch (TREE_CODE (type))
1946 case POINTER_TYPE:
1947 case REFERENCE_TYPE:
1948 /* We have to worry about mutually recursive pointers.  These can't
1949 be written in C, but they can in Ada.  It's pathological, but
1950 there's an ACATS test (c38102a) that checks for it.  Deal with this
1951 by checking whether we're pointing to another pointer, that one
1952 points to another pointer, and that one does too, and we have no htab.
1953 If so, get a hash table.  We check three levels deep to avoid
1954 the cost of the hash table if we don't need one. */
1955 if (POINTER_TYPE_P (TREE_TYPE (type))
1956 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
1957 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
1958 && !htab)
1960 result = walk_tree_without_duplicates (&TREE_TYPE (type),
1961 func, data);
1962 if (result)
1963 return result;
1965 break;
1968 /* ... fall through ... */
1970 case COMPLEX_TYPE:
1971 WALK_SUBTREE (TREE_TYPE (type));
1972 break;
1974 case METHOD_TYPE:
1975 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
1977 /* Fall through. */
1979 case FUNCTION_TYPE:
1980 WALK_SUBTREE (TREE_TYPE (type));
1982 tree arg;
1984 /* We never want to walk into default arguments. */
1985 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
1986 WALK_SUBTREE (TREE_VALUE (arg));
1988 break;
1990 case ARRAY_TYPE:
1991 /* Don't follow this node's type if it is a pointer, for fear that we'll
1992 have infinite recursion.  Those types are uninteresting anyway. */
1993 if (!POINTER_TYPE_P (TREE_TYPE (type))
1994 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE)
1995 WALK_SUBTREE (TREE_TYPE (type));
1996 WALK_SUBTREE (TYPE_DOMAIN (type));
1997 break;
1999 case BOOLEAN_TYPE:
2000 case ENUMERAL_TYPE:
2001 case INTEGER_TYPE:
2002 case CHAR_TYPE:
2003 case REAL_TYPE:
2004 WALK_SUBTREE (TYPE_MIN_VALUE (type));
2005 WALK_SUBTREE (TYPE_MAX_VALUE (type));
2006 break;
2008 case OFFSET_TYPE:
2009 WALK_SUBTREE (TREE_TYPE (type));
2010 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
2011 break;
2013 default:
2014 break;
2017 return NULL_TREE;
2020 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
2021 called with the DATA and the address of each sub-tree. If FUNC returns a
2022 non-NULL value, the traversal is aborted, and the value returned by FUNC
2023 is returned. If HTAB is non-NULL it is used to record the nodes visited,
2024 and to avoid visiting a node more than once. */
2026 tree
2027 walk_tree (tree *tp, walk_tree_fn func, void *data, void *htab_)
2029 htab_t htab = (htab_t) htab_;
2030 enum tree_code code;
2031 int walk_subtrees;
2032 tree result;
2034 #define WALK_SUBTREE_TAIL(NODE) \
2035 do \
2037 tp = & (NODE); \
2038 goto tail_recurse; \
2040 while (0)
2042 tail_recurse:
2043 /* Skip empty subtrees. */
2044 if (!*tp)
2045 return NULL_TREE;
2047 if (htab)
2049 void **slot;
2051 /* Don't walk the same tree twice, if the user has requested
2052 that we avoid doing so. */
2053 slot = htab_find_slot (htab, *tp, INSERT);
2054 if (*slot)
2055 return NULL_TREE;
2056 *slot = *tp;
2059 /* Call the function. */
2060 walk_subtrees = 1;
2061 result = (*func) (tp, &walk_subtrees, data);
2063 /* If we found something, return it. */
2064 if (result)
2065 return result;
2067 code = TREE_CODE (*tp);
2069 /* Even if we didn't, FUNC may have decided that there was nothing
2070 interesting below this point in the tree. */
2071 if (!walk_subtrees)
2073 if (code == TREE_LIST)
2074 /* But we still need to check our siblings. */
2075 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
2076 else
2077 return NULL_TREE;
2080 result = lang_hooks.tree_inlining.walk_subtrees (tp, &walk_subtrees, func,
2081 data, htab);
2082 if (result || ! walk_subtrees)
2083 return result;
2085 /* If this is a DECL_EXPR, walk into various fields of the type that it's
2086 defining. We only want to walk into these fields of a type in this
2087 case. Note that decls get walked as part of the processing of a
2088 BIND_EXPR.
2090 ??? Precisely which fields of types we are supposed to walk in this
2091 case vs. the normal case is not well defined. */
2092 if (code == DECL_EXPR
2093 && TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL
2094 && TREE_CODE (TREE_TYPE (DECL_EXPR_DECL (*tp))) != ERROR_MARK)
2096 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
2098 /* Call the function for the type. See if it returns anything or
2099 doesn't want us to continue. If we are to continue, walk both
2100 the normal fields and those for the declaration case. */
2101 result = (*func) (type_p, &walk_subtrees, data);
2102 if (result || !walk_subtrees)
2103 return NULL_TREE;
2105 result = walk_type_fields (*type_p, func, data, htab_);
2106 if (result)
2107 return result;
2109 WALK_SUBTREE (TYPE_SIZE (*type_p));
2110 WALK_SUBTREE (TYPE_SIZE_UNIT (*type_p));
2112 /* If this is a record type, also walk the fields. */
2113 if (TREE_CODE (*type_p) == RECORD_TYPE
2114 || TREE_CODE (*type_p) == UNION_TYPE
2115 || TREE_CODE (*type_p) == QUAL_UNION_TYPE)
2117 tree field;
2119 for (field = TYPE_FIELDS (*type_p); field;
2120 field = TREE_CHAIN (field))
2122 /* We'd like to look at the type of the field, but we can easily
2123 get infinite recursion. So assume it's pointed to elsewhere
2124 in the tree. Also, ignore things that aren't fields. */
2125 if (TREE_CODE (field) != FIELD_DECL)
2126 continue;
2128 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
2129 WALK_SUBTREE (DECL_SIZE (field));
2130 WALK_SUBTREE (DECL_SIZE_UNIT (field));
2131 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
2132 WALK_SUBTREE (DECL_QUALIFIER (field));
2137 else if (code != EXIT_BLOCK_EXPR
2138 && code != SAVE_EXPR
2139 && code != BIND_EXPR
2140 && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2142 int i, len;
2144 /* Walk over all the sub-trees of this operand. */
2145 len = first_rtl_op (code);
2146 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
2147 But, we only want to walk once. */
2148 if (code == TARGET_EXPR
2149 && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
2150 --len;
2152 /* Go through the subtrees. We need to do this in forward order so
2153 that the scope of a FOR_EXPR is handled properly. */
2154 #ifdef DEBUG_WALK_TREE
2155 for (i = 0; i < len; ++i)
2156 WALK_SUBTREE (TREE_OPERAND (*tp, i));
2157 #else
2158 for (i = 0; i < len - 1; ++i)
2159 WALK_SUBTREE (TREE_OPERAND (*tp, i));
2161 if (len)
2163 /* The common case is that we may tail recurse here. */
2164 if (code != BIND_EXPR
2165 && !TREE_CHAIN (*tp))
2166 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
2167 else
2168 WALK_SUBTREE (TREE_OPERAND (*tp, len - 1));
2170 #endif
2173 /* If this is a type, walk the needed fields in the type. */
2174 else if (TYPE_P (*tp))
2176 result = walk_type_fields (*tp, func, data, htab_);
2177 if (result)
2178 return result;
2180 else
2182 /* Not one of the easy cases. We must explicitly go through the
2183 children. */
2184 switch (code)
2186 case ERROR_MARK:
2187 case IDENTIFIER_NODE:
2188 case INTEGER_CST:
2189 case REAL_CST:
2190 case VECTOR_CST:
2191 case STRING_CST:
2192 case BLOCK:
2193 case PLACEHOLDER_EXPR:
2194 case SSA_NAME:
2195 case FIELD_DECL:
2196 case RESULT_DECL:
2197 /* None of these have subtrees other than those already walked
2198 above. */
2199 break;
2201 case TREE_LIST:
2202 WALK_SUBTREE (TREE_VALUE (*tp));
2203 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
2204 break;
2206 case TREE_VEC:
2208 int len = TREE_VEC_LENGTH (*tp);
2210 if (len == 0)
2211 break;
2213 /* Walk all elements but the first. */
2214 while (--len)
2215 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
2217 /* Now walk the first one as a tail call. */
2218 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
2221 case COMPLEX_CST:
2222 WALK_SUBTREE (TREE_REALPART (*tp));
2223 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
2225 case CONSTRUCTOR:
2226 WALK_SUBTREE_TAIL (CONSTRUCTOR_ELTS (*tp));
2228 case EXIT_BLOCK_EXPR:
2229 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 1));
2231 case SAVE_EXPR:
2232 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
2234 case BIND_EXPR:
2236 tree decl;
2237 for (decl = BIND_EXPR_VARS (*tp); decl; decl = TREE_CHAIN (decl))
2239 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
2240 into declarations that are just mentioned, rather than
2241 declared; they don't really belong to this part of the tree.
2242 And, we can see cycles: the initializer for a declaration
2243 can refer to the declaration itself. */
2244 WALK_SUBTREE (DECL_INITIAL (decl));
2245 WALK_SUBTREE (DECL_SIZE (decl));
2246 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
2248 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
2251 case STATEMENT_LIST:
2253 tree_stmt_iterator i;
2254 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
2255 WALK_SUBTREE (*tsi_stmt_ptr (i));
2257 break;
2259 default:
2260 /* ??? This could be a language-defined node. We really should make
2261 a hook for it, but right now just ignore it. */
2262 break;
2266 /* We didn't find what we were looking for. */
2267 return NULL_TREE;
2269 #undef WALK_SUBTREE
2270 #undef WALK_SUBTREE_TAIL
2273 /* Like walk_tree, but does not walk duplicate nodes more than once. */
2275 tree
2276 walk_tree_without_duplicates (tree *tp, walk_tree_fn func, void *data)
2278 tree result;
2279 htab_t htab;
2281 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
2282 result = walk_tree (tp, func, data, htab);
2283 htab_delete (htab);
2284 return result;
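/* Illustrative sketch, not part of the original file: a minimal walk_tree
   callback.  The callback receives the address of each sub-tree, a flag it
   may clear to skip that node's children, and the DATA pointer handed to
   walk_tree; returning a non-NULL tree stops the walk.  This example merely
   counts CALL_EXPR nodes and is hypothetical, not used anywhere in GCC.  */

static tree
count_call_exprs_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (TREE_CODE (*tp) == CALL_EXPR)
    ++*((int *) data);

  /* Keep walking.  */
  return NULL_TREE;
}

/* Possible use, given a function declaration FN with a saved body:

     int ncalls = 0;
     walk_tree_without_duplicates (&DECL_SAVED_TREE (fn),
                                   count_call_exprs_r, &ncalls);  */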
2287 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2289 tree
2290 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2292 enum tree_code code = TREE_CODE (*tp);
2294 /* We make copies of most nodes. */
2295 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
2296 || TREE_CODE_CLASS (code) == 'c'
2297 || code == TREE_LIST
2298 || code == TREE_VEC
2299 || code == TYPE_DECL)
2301 /* Because the chain gets clobbered when we make a copy, we save it
2302 here. */
2303 tree chain = TREE_CHAIN (*tp);
2304 tree new;
2306 /* Copy the node. */
2307 new = copy_node (*tp);
2309 /* Propagate mudflap marked-ness. */
2310 if (flag_mudflap && mf_marked_p (*tp))
2311 mf_mark (new);
2313 *tp = new;
2315 /* Now, restore the chain, if appropriate. That will cause
2316 walk_tree to walk into the chain as well. */
2317 if (code == PARM_DECL || code == TREE_LIST)
2318 TREE_CHAIN (*tp) = chain;
2320 /* For now, we don't update BLOCKs when we make copies.  So, we
2321 have to nullify the BLOCK of each copied BIND_EXPR. */
2322 if (TREE_CODE (*tp) == BIND_EXPR)
2323 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2326 else if (TREE_CODE_CLASS (code) == 't')
2327 *walk_subtrees = 0;
2328 else if (TREE_CODE_CLASS (code) == 'd')
2329 *walk_subtrees = 0;
2330 else if (code == STATEMENT_LIST)
2331 abort ();
2333 return NULL_TREE;
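/* Illustrative sketch, not part of the original file: because copy_tree_r
   replaces each visited node with a fresh copy in place, walking an
   expression with it yields a deep copy of the expression nodes, while
   decls and types stay shared (copy_tree_r clears *WALK_SUBTREES for them)
   and STATEMENT_LISTs are not handled.  This hypothetical helper shows the
   intended pairing with walk_tree.  */

static tree
deep_copy_expr (tree expr)
{
  /* Copy every expression node reachable from EXPR, in place.  */
  walk_tree (&expr, copy_tree_r, NULL, NULL);
  return expr;
}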
2336 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2337 information indicating to what new SAVE_EXPR this one should be mapped,
2338 use that one. Otherwise, create a new node and enter it in ST. */
2340 void
2341 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2343 splay_tree st = (splay_tree) st_;
2344 splay_tree_node n;
2345 tree t;
2347 /* See if we already encountered this SAVE_EXPR. */
2348 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2350 /* If we didn't already remap this SAVE_EXPR, do so now. */
2351 if (!n)
2353 t = copy_node (*tp);
2355 /* Remember this SAVE_EXPR. */
2356 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2357 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2358 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2360 else
2362 /* We've already walked into this SAVE_EXPR; don't do it again. */
2363 *walk_subtrees = 0;
2364 t = (tree) n->value;
2367 /* Replace this SAVE_EXPR with the copy. */
2368 *tp = t;
2371 /* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local
2372 label, copies the label's declaration and enters it in the splay tree
2373 in DATA (which is really an `inline_data *'). */
2375 static tree
2376 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
2377 void *data)
2379 inline_data *id = (inline_data *) data;
2381 /* Don't walk into types. */
2382 if (TYPE_P (*tp))
2383 *walk_subtrees = 0;
2385 else if (TREE_CODE (*tp) == LABEL_EXPR)
2387 tree decl = TREE_OPERAND (*tp, 0);
2389 /* Copy the decl and remember the copy. */
2390 insert_decl_map (id, decl,
2391 copy_decl_for_inlining (decl, DECL_CONTEXT (decl),
2392 DECL_CONTEXT (decl)));
2395 return NULL_TREE;
2398 /* Called via walk_tree when an expression is unsaved.  Using the
2399 decl_map splay tree in the `inline_data' pointed to by DATA, remaps
2400 all local declarations to appropriate replacements. */
2402 static tree
2403 unsave_r (tree *tp, int *walk_subtrees, void *data)
2405 inline_data *id = (inline_data *) data;
2406 splay_tree st = id->decl_map;
2407 splay_tree_node n;
2409 /* Only a local declaration (variable or label). */
2410 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
2411 || TREE_CODE (*tp) == LABEL_DECL)
2413 /* Lookup the declaration. */
2414 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2416 /* If it's there, remap it. */
2417 if (n)
2418 *tp = (tree) n->value;
2421 else if (TREE_CODE (*tp) == STATEMENT_LIST)
2422 copy_statement_list (tp);
2423 else if (TREE_CODE (*tp) == BIND_EXPR)
2424 copy_bind_expr (tp, walk_subtrees, id);
2425 else if (TREE_CODE (*tp) == SAVE_EXPR)
2426 remap_save_expr (tp, st, walk_subtrees);
2427 else
2429 copy_tree_r (tp, walk_subtrees, NULL);
2431 /* Do whatever unsaving is required. */
2432 unsave_expr_1 (*tp);
2435 /* Keep iterating. */
2436 return NULL_TREE;
2439 /* Default lang hook for "unsave_expr_now". Copies everything in EXPR and
2440 replaces variables, labels and SAVE_EXPRs local to EXPR. */
2442 tree
2443 lhd_unsave_expr_now (tree expr)
2445 inline_data id;
2447 /* There's nothing to do for NULL_TREE. */
2448 if (expr == 0)
2449 return expr;
2451 /* Set up ID. */
2452 memset (&id, 0, sizeof (id));
2453 VARRAY_TREE_INIT (id.fns, 1, "fns");
2454 VARRAY_PUSH_TREE (id.fns, current_function_decl);
2455 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2457 /* Walk the tree once to find local labels. */
2458 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
2460 /* Walk the tree again, copying, remapping, and unsaving. */
2461 walk_tree (&expr, unsave_r, &id, NULL);
2463 /* Clean up. */
2464 splay_tree_delete (id.decl_map);
2466 return expr;
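/* Illustrative sketch, not part of the original file: the `lhd_' prefix and
   the comment above mark this as the default for the "unsave_expr_now"
   lang hook, so front ends that do not override it reach it through the
   lang-hooks structure, roughly:

     expr = lang_hooks.unsave_expr_now (expr);

   The exact hook field spelling is an assumption here; only the behavior
   (copying EXPR and remapping its local variables, labels and SAVE_EXPRs)
   comes from the code above.  */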
2469 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
2471 static tree
2472 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
2474 if (*tp == data)
2475 return (tree) data;
2476 else
2477 return NULL;
2480 bool
2481 debug_find_tree (tree top, tree search)
2483 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
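/* Illustrative usage note, not part of the original file: since
   debug_find_tree is meant to be called from the debugger, a typical
   invocation is simply

     (gdb) call debug_find_tree (some_stmt, suspect_decl)

   which evaluates to true if SUSPECT_DECL occurs anywhere beneath
   SOME_STMT.  The operand names are hypothetical.  */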
2486 /* Declare the variables created by the inliner. Add all the variables in
2487 VARS to BIND_EXPR. */
2489 static void
2490 declare_inline_vars (tree bind_expr, tree vars)
2492 tree t;
2493 for (t = vars; t; t = TREE_CHAIN (t))
2494 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
2496 add_var_to_bind_expr (bind_expr, vars);