gcc/tree-inline.c
/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "integrate.h"
#include "varray.h"
#include "hashtab.h"
#include "splay-tree.h"
#include "langhooks.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "function.h"
#include "diagnostic.h"
/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "tree-iterator.h"
#include "tree-gimple.h"

/* 0 if we should not perform inlining.
   1 if we should expand function calls inline at the tree level.
   2 if we should consider *all* functions to be inline
   candidates.  */

int flag_inline_trees = 0;
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
/* Data required for function inlining.  */

typedef struct inline_data
{
  /* A stack of the functions we are inlining.  For example, if we are
     compiling `f', which calls `g', which calls `h', and we are
     inlining the body of `h', the stack will contain `h', followed
     by `g', followed by `f'.  The first few elements of the stack may
     contain other functions that we know we should not recurse into,
     even though they are not directly being inlined.  */
  varray_type fns;
  /* The index of the first element of FNS that really represents an
     inlined function.  */
  unsigned first_inlined_fn;
  /* The label to jump to when a return statement is encountered.  If
     this value is NULL, then return statements will simply be
     remapped as return statements, rather than as jumps.  */
  tree ret_label;
  /* The VAR_DECL for the return value.  */
  tree retvar;
  /* The map from local declarations in the inlined function to
     equivalents in the function into which it is being inlined.  */
  splay_tree decl_map;
  /* Nonzero if we are currently within the cleanup for a
     TARGET_EXPR.  */
  int in_target_cleanup_p;
  /* A list of the functions the current function has inlined.  */
  varray_type inlined_fns;
  /* We use the same mechanism to build clones that we do to perform
     inlining.  However, there are a few places where we need to
     distinguish between those two situations.  This flag is true if
     we are cloning, rather than inlining.  */
  bool cloning_p;
  /* Similarly for saving the function body.  */
  bool saving_p;
  /* Hash table used to prevent walk_tree from visiting the same node
     umpteen million times.  */
  htab_t tree_pruner;
  /* Callgraph node of the function we are inlining into.  */
  struct cgraph_node *node;
  /* Callgraph node of the currently inlined function.  */
  struct cgraph_node *current_node;
  /* Statement iterator.  We need this so we can keep the tree in
     gimple form when we insert the inlined function.  It is not
     used when we are not dealing with gimple trees.  */
  tree_stmt_iterator tsi;
} inline_data;
/* Prototypes.  */

/* The approximate number of instructions per statement.  This number
   need not be particularly accurate; it is used only to make
   decisions about when a function is too big to inline.  */
#define INSNS_PER_STMT (10)

static tree copy_body_r (tree *, int *, void *);
static tree copy_body (inline_data *);
static tree expand_call_inline (tree *, int *, void *);
static void expand_calls_inline (tree *, inline_data *);
static bool inlinable_function_p (tree);
static tree remap_decl (tree, inline_data *);
static tree remap_type (tree, inline_data *);
static tree initialize_inlined_parameters (inline_data *, tree,
                                           tree, tree, tree);
static void remap_block (tree *, inline_data *);
static tree remap_decls (tree, inline_data *);
static void copy_bind_expr (tree *, int *, inline_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree bind_expr, tree vars);
/* Insert a tree->tree mapping for ID.  Although the name suggests that
   the trees should be variables, this is used for more than that.  */

static void
insert_decl_map (inline_data *id, tree key, tree value)
{
  splay_tree_insert (id->decl_map, (splay_tree_key) key,
                     (splay_tree_value) value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    splay_tree_insert (id->decl_map, (splay_tree_key) value,
                       (splay_tree_value) value);
}
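
/* Illustrative note (not part of the original sources): after a local
   declaration D has been remapped to its copy D1 with

     insert_decl_map (id, d, d1);

   the table maps both D -> D1 and D1 -> D1, so if the copied tree is
   walked again the already-substituted D1 is simply mapped to itself
   instead of being duplicated once more.  */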
/* Remap DECL during the copying of the BLOCK tree for the function.
   We are only called to remap local variables in the current function.  */

static tree
remap_decl (tree decl, inline_data *id)
{
  splay_tree_node n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
  tree fn = VARRAY_TOP_TREE (id->fns);

  /* See if we have remapped this declaration.  If we didn't already have an
     equivalent for this declaration, create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = copy_decl_for_inlining (decl, fn, VARRAY_TREE (id->fns, 0));

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
      else if (TREE_CODE (t) == PARM_DECL)
        DECL_ARG_TYPE_AS_WRITTEN (t)
          = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
        }

#if 0
      /* FIXME handle anon aggrs.  */
      if (! DECL_NAME (t) && TREE_TYPE (t)
          && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
        {
          /* For a VAR_DECL of anonymous type, we must also copy the
             member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS.  */
          tree members = NULL;
          tree src;

          for (src = DECL_ANON_UNION_ELEMS (t); src;
               src = TREE_CHAIN (src))
            {
              tree member = remap_decl (TREE_VALUE (src), id);

              if (TREE_PURPOSE (src))
                abort ();
              members = tree_cons (NULL, member, members);
            }
          DECL_ANON_UNION_ELEMS (t) = nreverse (members);
        }
#endif

      /* Remember it, so that if we encounter this local entity
         again we can reuse this copy.  */
      insert_decl_map (id, decl, t);
      return t;
    }

  return unshare_expr ((tree) n->value);
}
static tree
remap_type (tree type, inline_data *id)
{
  splay_tree_node node;
  tree new, t;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
  if (node)
    return (tree) node->value;

  /* The type only needs remapping if it's variably modified by a variable
     in the function we are inlining.  */
  if (! variably_modified_type_p (type, VARRAY_TOP_TREE (id->fns)))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                         TYPE_MODE (type),
                                         TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                           TYPE_MODE (type),
                                           TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else
    new = copy_node (type);

  insert_decl_map (id, type, new);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new) = t;
      TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new;
    }
  else
    {
      TYPE_MAIN_VARIANT (new) = new;
      TYPE_NEXT_VARIANT (new) = NULL;
    }

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new) = NULL;
  TYPE_REFERENCE_TO (new) = NULL;

  switch (TREE_CODE (new))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case CHAR_TYPE:
      t = TYPE_MIN_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
      return new;

    case FUNCTION_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
      return new;

    case ARRAY_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
      break;

    case FILE_TYPE:
    case SET_TYPE:
    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      abort ();
    }

  walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);

  return new;
}
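
/* Illustrative example (not part of the original sources): when inlining

     void f (int n) { char buf[n]; ... }

   the array type of BUF is variably modified by N, so a fresh copy of
   that type is built here; walking its size trees with copy_body_r then
   makes them refer to the caller's substituted copy of N rather than to
   F's own PARM_DECL.  */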
static tree
remap_decls (tree decls, inline_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */
      if (!new_var || new_var == id->retvar)
        ;
#ifdef ENABLE_CHECKING
      else if (!DECL_P (new_var))
        abort ();
#endif
      else
        {
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;
        }
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, inline_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);

  fn = VARRAY_TREE (id->fns, 0);
#if 1
  /* FIXME!  It shouldn't be so hard to manage blocks.  Rebuilding them in
     rest_of_compilation is a good start.  */
  if (id->cloning_p)
    /* We're building a clone; DECL_INITIAL is still
       error_mark_node, and current_binding_level is the parm
       binding level.  */
    lang_hooks.decls.insert_block (new_block);
  else
    {
      /* Attach this new block after the DECL_INITIAL block for the
         function into which this block is being inlined.  In
         rest_of_compilation we will straighten out the BLOCK tree.  */
      tree *first_block;
      if (DECL_INITIAL (fn))
        first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
      else
        first_block = &DECL_INITIAL (fn);
      BLOCK_CHAIN (new_block) = *first_block;
      *first_block = new_block;
    }
#endif
  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new;

  new = alloc_stmt_list ();
  ni = tsi_start (new);
  oi = tsi_start (*tp);
  *tp = new;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
}
static void
copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
}
/* Called from copy_body via walk_tree.  DATA is really an `inline_data *'.  */

static tree
copy_body_r (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id = (inline_data *) data;
  tree fn = VARRAY_TOP_TREE (id->fns);

#if 0
  /* All automatic variables should have a DECL_CONTEXT indicating
     what function they come from.  */
  if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
      && DECL_NAMESPACE_SCOPE_P (*tp))
    if (! DECL_EXTERNAL (*tp) && ! TREE_STATIC (*tp))
      abort ();
#endif

  /* If this is a RETURN_EXPR, change it into a MODIFY_EXPR and a
     GOTO_EXPR with the RET_LABEL as its target.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->ret_label)
    {
      tree return_stmt = *tp;
      tree goto_stmt;

      /* Build the GOTO_EXPR.  */
      tree assignment = TREE_OPERAND (return_stmt, 0);
      goto_stmt = build1 (GOTO_EXPR, void_type_node, id->ret_label);
      TREE_USED (id->ret_label) = 1;

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original
         RESULT_DECL.  */
      if (assignment)
        {
          /* Do not create a statement containing a naked RESULT_DECL.  */
          if (TREE_CODE (assignment) == RESULT_DECL)
            gimplify_stmt (&assignment);

          *tp = build (BIND_EXPR, void_type_node, NULL, NULL, NULL);
          append_to_statement_list (assignment, &BIND_EXPR_BODY (*tp));
          append_to_statement_list (goto_stmt, &BIND_EXPR_BODY (*tp));
        }
      /* If we're not returning anything just do the jump.  */
      else
        *tp = goto_stmt;
    }
  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      if (! new_decl)
        abort ();
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
    }
#if 0
  else if (nonstatic_local_decl_p (*tp)
           && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
    abort ();
#endif
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == UNSAVE_EXPR)
    /* UNSAVE_EXPRs should not be generated until expansion time.  */
    abort ();
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  else if (TREE_CODE (*tp) == LABELED_BLOCK_EXPR)
    {
      /* We need a new copy of this labeled block; the EXIT_BLOCK_EXPR
         will refer to it, so save a copy ready for remapping.  We
         save it in the decl_map, although it isn't a decl.  */
      tree new_block = copy_node (*tp);
      insert_decl_map (id, *tp, new_block);
      *tp = new_block;
    }
  else if (TREE_CODE (*tp) == EXIT_BLOCK_EXPR)
    {
      splay_tree_node n
        = splay_tree_lookup (id->decl_map,
                             (splay_tree_key) TREE_OPERAND (*tp, 0));
      /* We _must_ have seen the enclosing LABELED_BLOCK_EXPR.  */
      if (! n)
        abort ();
      *tp = copy_node (*tp);
      TREE_OPERAND (*tp, 0) = (tree) n->value;
    }
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);
  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      tree old_node = *tp;

      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (lang_hooks.tree_inlining.auto_var_in_fn_p
              (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          splay_tree_node n;

          n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
          if (n)
            {
              value = (tree) n->value;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
                {
                  *tp = value;
                  return copy_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR
               && (lang_hooks.tree_inlining.auto_var_in_fn_p
                   (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Get rid of &* from inline substitutions.  It can occur when
             someone takes the address of a parm or return slot passed by
             invisible reference.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          splay_tree_node n;

          n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
          if (n)
            {
              value = (tree) n->value;
              if (TREE_CODE (value) == INDIRECT_REF)
                {
                  if (!lang_hooks.types_compatible_p
                      (TREE_TYPE (*tp), TREE_TYPE (TREE_OPERAND (value, 0))))
                    *tp = fold_convert (TREE_TYPE (*tp),
                                        TREE_OPERAND (value, 0));
                  else
                    *tp = TREE_OPERAND (value, 0);

                  return copy_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          splay_tree_node n;

          n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
          if (n)
            {
              value = (tree) n->value;
              STRIP_NOPS (value);
              if (TREE_CODE (value) == ADDR_EXPR
                  && (lang_hooks.types_compatible_p
                      (TREE_TYPE (*tp), TREE_TYPE (TREE_OPERAND (value, 0)))))
                {
                  *tp = TREE_OPERAND (value, 0);
                  return copy_body_r (tp, walk_subtrees, data);
                }
            }
        }

      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) == CALL_EXPR && id->node && get_callee_fndecl (*tp))
        {
          if (id->saving_p)
            {
              struct cgraph_node *node;
              struct cgraph_edge *edge;

              for (node = id->node->next_clone; node; node = node->next_clone)
                {
                  edge = cgraph_edge (node, old_node);
                  if (edge)
                    edge->call_expr = *tp;
                  else
                    abort ();
                }
            }
          else
            {
              struct cgraph_edge *edge
                = cgraph_edge (id->current_node, old_node);

              if (edge)
                cgraph_clone_edge (edge, id->node, *tp);
            }
        }

      TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_body (inline_data *id)
{
  tree body;
  tree fndecl = VARRAY_TOP_TREE (id->fns);

  if (fndecl == current_function_decl
      && cfun->saved_tree)
    body = cfun->saved_tree;
  else
    body = DECL_SAVED_TREE (fndecl);
  walk_tree (&body, copy_body_r, id, NULL);

  return body;
}
static void
setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
                     tree *init_stmts, tree *vars, bool *gimplify_init_stmts_p)
{
  tree init_stmt;
  tree var;

  /* If the parameter is never assigned to, we may not need to
     create a new variable here at all.  Instead, we may be able
     to just use the argument value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value))
    {
      /* We can't risk substituting complex expressions.  They
         might contain variables that will be assigned to later.
         Theoretically, we could check the expression to see if
         all of the variables that determine its value are
         read-only, but we don't bother.  */
      /* We may produce non-gimple trees by adding NOPs or introduce
         invalid sharing when the operand is not really constant.
         It is not a big deal to prohibit constant propagation here, as
         we will constant propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
          && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p)))
        {
          insert_decl_map (id, p, value);
          return;
        }
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var);

  /* Declare this new variable.  */
  TREE_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that a TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      tree rhs = fold_convert (TREE_TYPE (var), value);

      if (rhs == error_mark_node)
        return;

      /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
         keep our trees in gimple form.  */
      init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
      append_to_statement_list (init_stmt, init_stmts);

      /* If we did not create a gimple value and we did not create a gimple
         cast of a gimple value, then we will need to gimplify INIT_STMTS
         at the end.  Note that is_gimple_cast only checks the outer
         tree code, not its operand.  Thus the explicit check that its
         operand is a gimple value.  */
      if (!is_gimple_val (rhs)
          && (!is_gimple_cast (rhs)
              || !is_gimple_val (TREE_OPERAND (rhs, 0))))
        *gimplify_init_stmts_p = true;
    }
}
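
/* Illustrative example (not part of the original sources): for a call
   f (5) to

     void f (const int p) { ... }

   the argument 5 is a gimple minimal invariant of a compatible type and
   P is read-only and not addressable, so P is mapped directly to the
   constant above and no VAR_DECL or initialization statement is built.  */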
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the ARGS (presented as a TREE_LIST).  */

static tree
initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
                               tree fn, tree bind_expr)
{
  tree init_stmts = NULL_TREE;
  tree parms;
  tree a;
  tree p;
  tree vars = NULL_TREE;
  bool gimplify_init_stmts_p = false;
  int argnum = 0;

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);
  if (fn == current_function_decl)
    parms = cfun->saved_args;

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = args; p;
       a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
    {
      tree value;

      ++argnum;

      /* Find the initializer.  */
      value = lang_hooks.tree_inlining.convert_parm_for_inlining
              (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);

      setup_one_parameter (id, p, value, fn, &init_stmts, &vars,
                           &gimplify_init_stmts_p);
    }

  /* Evaluate trailing arguments.  */
  for (; a; a = TREE_CHAIN (a))
    {
      tree value = TREE_VALUE (a);
      append_to_statement_list (value, &init_stmts);
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      if (!static_chain)
        abort ();

      setup_one_parameter (id, p, static_chain, fn, &init_stmts, &vars,
                           &gimplify_init_stmts_p);
    }

  if (gimplify_init_stmts_p)
    gimplify_body (&init_stmts, current_function_decl);

  declare_inline_vars (bind_expr, vars);
  return init_stmts;
}
/* Declare a return variable to replace the RESULT_DECL for the function we
   are calling.  RETURN_SLOT_ADDR, if non-null, was a fake parameter that
   took the address of the result.  MODIFY_DEST, if non-null, was the LHS of
   the MODIFY_EXPR to which this call is the RHS.

   The return value is a (possibly null) value that is the result of the
   function as seen by the callee.  *USE_P is a (possibly null) value that
   holds the result as seen by the caller.  */

static tree
declare_return_variable (inline_data *id, tree return_slot_addr,
                         tree modify_dest, tree *use_p)
{
  tree callee = VARRAY_TOP_TREE (id->fns);
  tree caller = VARRAY_TREE (id->fns, 0);
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type = TREE_TYPE (TREE_TYPE (callee));
  tree var, use;

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || VOID_TYPE_P (callee_type))
    {
      *use_p = NULL_TREE;
      return NULL_TREE;
    }

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot_addr)
    {
      /* The front end shouldn't have used both return_slot_addr and
         a modify expression.  */
      if (modify_dest)
        abort ();
      var = build_fold_indirect_ref (return_slot_addr);
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  if (TREE_ADDRESSABLE (callee_type))
    abort ();

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!lang_hooks.types_compatible_p (caller_type, callee_type))
        use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
         reuse the destination variable, because we've no good way to
         create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
        use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
         reuse it as the result of the call directly.  Don't do this if
         it would promote MODIFY_DEST to addressable.  */
      else if (!TREE_STATIC (modify_dest)
               && !TREE_ADDRESSABLE (modify_dest)
               && !TREE_ADDRESSABLE (result))
        use_it = true;

      if (use_it)
        {
          var = modify_dest;
          use = NULL;
          goto done;
        }
    }

  if (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) != INTEGER_CST)
    abort ();

  var = copy_decl_for_inlining (result, callee, caller);
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
  DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
    = tree_cons (NULL_TREE, var,
                 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
    use = fold_convert (caller_type, var);

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  *use_p = use;
  return var;
}
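
/* Illustrative example (not part of the original sources): when the call

     x = f ();

   is being expanded inline and X is a local, non-addressable variable of
   a compatible type, X itself is chosen as VAR above, so the callee's
   RESULT_DECL is remapped straight to X and no extra temporary or final
   copy is required.  */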
/* Returns nonzero if a function can be inlined as a tree.  */

bool
tree_inlinable_function_p (tree fn)
{
  return inlinable_function_p (fn);
}
static const char *inline_forbidden_reason;

static tree
inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
                      void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;
  tree t;

  switch (TREE_CODE (node))
    {
    case CALL_EXPR:
      /* Refuse to inline an alloca call unless the user explicitly forced
         it, as this may change the program's memory overhead drastically
         when the function using alloca is called in a loop.  In GCC as
         present in SPEC2000, inlining into schedule_block caused it to
         require 2GB of RAM instead of 256MB.  */
      if (alloca_call_p (node)
          && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        {
          inline_forbidden_reason
            = N_("%Jfunction '%F' can never be inlined because it uses "
                 "alloca (override using the always_inline attribute)");
          return node;
        }
      t = get_callee_fndecl (node);
      if (! t)
        break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
        {
          inline_forbidden_reason
            = N_("%Jfunction '%F' can never be inlined because it uses setjmp");
          return node;
        }

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (t))
          {
            /* We cannot inline functions that take a variable number of
               arguments.  */
          case BUILT_IN_VA_START:
          case BUILT_IN_STDARG_START:
          case BUILT_IN_NEXT_ARG:
          case BUILT_IN_VA_END:
            inline_forbidden_reason
              = N_("%Jfunction '%F' can never be inlined because it "
                   "uses variable argument lists");
            return node;

          case BUILT_IN_LONGJMP:
            /* We can't inline functions that call __builtin_longjmp at
               all.  The non-local goto machinery really requires the
               destination be in a different function.  If we allow the
               function calling __builtin_longjmp to be inlined into the
               function calling __builtin_setjmp, Things will Go Awry.  */
            inline_forbidden_reason
              = N_("%Jfunction '%F' can never be inlined because "
                   "it uses setjmp-longjmp exception handling");
            return node;

          case BUILT_IN_NONLOCAL_GOTO:
            /* Similarly.  */
            inline_forbidden_reason
              = N_("%Jfunction '%F' can never be inlined because "
                   "it uses non-local goto");
            return node;

          default:
            break;
          }
      break;

    case BIND_EXPR:
      for (t = BIND_EXPR_VARS (node); t ; t = TREE_CHAIN (t))
        {
          /* We cannot inline functions that contain other functions.  */
          if (TREE_CODE (t) == FUNCTION_DECL && DECL_INITIAL (t))
            {
              inline_forbidden_reason
                = N_("%Jfunction '%F' can never be inlined "
                     "because it contains a nested function");
              return node;
            }
        }
      break;

    case GOTO_EXPR:
      t = TREE_OPERAND (node, 0);

      /* We will not inline a function which uses computed goto.  The
         addresses of its local labels, which may be tucked into
         global storage, are of course not constant across
         instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
        {
          inline_forbidden_reason
            = N_("%Jfunction '%F' can never be inlined "
                 "because it contains a computed goto");
          return node;
        }
      break;

    case LABEL_EXPR:
      t = TREE_OPERAND (node, 0);
      if (DECL_NONLOCAL (t))
        {
          /* We cannot inline a function that receives a non-local goto
             because we cannot remap the destination label used in the
             function that is performing the non-local goto.  */
          inline_forbidden_reason
            = N_("%Jfunction '%F' can never be inlined "
                 "because it receives a non-local goto");
          return node;
        }
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      /* We cannot inline a function of the form

           void F (int i) { struct S { int ar[i]; } s; }

         Attempting to do so produces a catch-22.
         If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
         UNION_TYPE nodes, then it goes into infinite recursion on a
         structure containing a pointer to its own type.  If it doesn't,
         then the type node for S doesn't get adjusted properly when
         F is inlined, and we abort in find_function_data.  */
      for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
        if (variably_modified_type_p (TREE_TYPE (t), NULL))
          {
            inline_forbidden_reason
              = N_("%Jfunction '%F' can never be inlined "
                   "because it uses variable sized variables");
            return node;
          }

    default:
      break;
    }

  return NULL_TREE;
}
/* Return subexpression representing possible alloca call, if any.  */
static tree
inline_forbidden_p (tree fndecl)
{
  location_t saved_loc = input_location;
  tree ret = walk_tree_without_duplicates (&DECL_SAVED_TREE (fndecl),
                                           inline_forbidden_p_1, fndecl);

  input_location = saved_loc;
  return ret;
}
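
/* Illustrative example (not part of the original sources): a function
   such as

     void f (unsigned n) { char *p = alloca (n); ... }

   is reported as uninlinable by the walk above (unless it carries the
   always_inline attribute), whereas a leaf function containing only
   ordinary arithmetic is not rejected here.  */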
/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  */

static bool
inlinable_function_p (tree fn)
{
  bool inlinable = true;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* See if there is any language-specific reason it cannot be
     inlined.  (It is important that this hook be called early because
     in C++ it may result in template instantiation.)
     If the function is not inlinable for language-specific reasons,
     it is left up to the langhook to explain why.  */
  inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);

  /* If we don't have the function body available, we can't inline it.
     However, this should not be recorded since we also get here for
     forward declared inline functions.  Therefore, return at once.  */
  if (!DECL_SAVED_TREE (fn))
    return false;

  /* If we're not inlining at all, then we cannot inline this function.  */
  else if (!flag_inline_trees)
    inlinable = false;

  /* Only try to inline functions if DECL_INLINE is set.  This should be
     true for all functions declared `inline', and for all other functions
     as well with -finline-functions.

     Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
     it's the front-end that must set DECL_INLINE in this case, because
     dwarf2out loses if a function that does not have DECL_INLINE set is
     inlined anyway.  That is why we have both DECL_INLINE and
     DECL_DECLARED_INLINE_P.  */
  /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
     here should be redundant.  */
  else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
    inlinable = false;

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
         some of these warnings would be issued while trying to expand
         the function inline, but that would cause multiple warnings
         about functions that would for example call alloca.  But since
         this is a property of the function, just one warning is enough.
         As a bonus we can now give more details about the reason why a
         function is not inlinable.
         We only warn for functions declared `inline' by the user.  */
      bool do_warning = (warn_inline
                         && DECL_INLINE (fn)
                         && DECL_DECLARED_INLINE_P (fn)
                         && !DECL_IN_SYSTEM_HEADER (fn));

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        sorry (inline_forbidden_reason, fn, fn);
      else if (do_warning)
        warning (inline_forbidden_reason, fn, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}
/* Used by estimate_num_insns.  Estimate the number of instructions seen
   by a given statement.  */

static tree
estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
{
  int *count = data;
  tree x = *tp;

  if (TYPE_P (x) || DECL_P (x))
    {
      *walk_subtrees = 0;
      return NULL;
    }
  /* Assume that constants and references count as nothing.  These should
     be dominated by the operations among them that we count later, and
     they are a common target of CSE and similar optimizations.  */
  else if (TREE_CODE_CLASS (TREE_CODE (x)) == 'c'
           || TREE_CODE_CLASS (TREE_CODE (x)) == 'r')
    return NULL;

  switch (TREE_CODE (x))
    {
    /* Containers have no cost.  */
    case TREE_LIST:
    case TREE_VEC:
    case BLOCK:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case OBJ_TYPE_REF:
    case EXC_PTR_EXPR: /* ??? */
    case FILTER_EXPR: /* ??? */
    case COMPOUND_EXPR:
    case BIND_EXPR:
    case LABELED_BLOCK_EXPR:
    case WITH_CLEANUP_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
    case SAVE_EXPR:
    case UNSAVE_EXPR:
    case ADDR_EXPR:
    case COMPLEX_EXPR:
    case EXIT_BLOCK_EXPR:
    case CASE_LABEL_EXPR:
    case SSA_NAME:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case STATEMENT_LIST:
    case ERROR_MARK:
    case NON_LVALUE_EXPR:
    case ENTRY_VALUE_EXPR:
    case FDESC_EXPR:
    case VA_ARG_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case EXIT_EXPR:
    case LOOP_EXPR:
    case PHI_NODE:
    case WITH_SIZE_EXPR:
      break;

    /* We don't account constants for now.  Assume that the cost is amortized
       by the operations that do use them.  We may reconsider this decision
       once we are able to optimize the tree before estimating its size and
       break out static initializers.  */
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      *walk_subtrees = 0;
      return NULL;

    /* Recognize assignments of large structures and constructors of
       big arrays.  */
    case INIT_EXPR:
    case MODIFY_EXPR:
      x = TREE_OPERAND (x, 0);
      /* FALLTHRU */
    case TARGET_EXPR:
    case CONSTRUCTOR:
      {
        HOST_WIDE_INT size;

        size = int_size_in_bytes (TREE_TYPE (x));

        if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
          *count += 10;
        else
          *count += ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
      }
      break;

    /* Assign a cost of 1 to the usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:

    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONVERT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case SWITCH_EXPR:

    case ASM_EXPR:

    case RESX_EXPR:
      *count += 1;
      break;

    /* A few special cases of expensive operations.  This is useful
       to avoid inlining on functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      *count += 10;
      break;
    case CALL_EXPR:
      {
        tree decl = get_callee_fndecl (x);

        if (decl && DECL_BUILT_IN (decl))
          switch (DECL_FUNCTION_CODE (decl))
            {
            case BUILT_IN_CONSTANT_P:
              *walk_subtrees = 0;
              return NULL_TREE;
            case BUILT_IN_EXPECT:
              return NULL_TREE;
            default:
              break;
            }
        *count += 10;
        break;
      }
    default:
      /* Abort here so we know we don't miss any nodes.  */
      abort ();
    }
  return NULL;
}
/* Estimate the number of instructions that will be created by expanding
   EXPR.  */

int
estimate_num_insns (tree expr)
{
  int num = 0;
  walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);
  return num;
}
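
/* A minimal usage sketch (illustrative only; the actual heuristics live
   in the callers, and SOME_LIMIT below is a placeholder, not a real
   parameter name):

     if (estimate_num_insns (DECL_SAVED_TREE (fndecl)) > SOME_LIMIT)
       ...treat FNDECL as too large to inline...  */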
/* If *TP is a CALL_EXPR, replace it with its inline expansion.  */

static tree
expand_call_inline (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id;
  tree t;
  tree expr;
  tree stmt;
  tree use_retvar;
  tree decl;
  tree fn;
  tree arg_inits;
  tree *inlined_body;
  splay_tree st;
  tree args;
  tree return_slot_addr;
  tree modify_dest;
  location_t saved_location;
  struct cgraph_edge *edge;
  const char *reason;

  /* See what we've got.  */
  id = (inline_data *) data;
  t = *tp;

  /* Set input_location here so we get the right instantiation context
     if we call instantiate_decl from inlinable_function_p.  */
  saved_location = input_location;
  if (EXPR_HAS_LOCATION (t))
    input_location = EXPR_LOCATION (t);

  /* Recurse, but letting recursive invocations know that we are
     inside the body of a TARGET_EXPR.  */
  if (TREE_CODE (*tp) == TARGET_EXPR)
    {
#if 0
      int i, len = first_rtl_op (TARGET_EXPR);

      /* We're walking our own subtrees.  */
      *walk_subtrees = 0;

      /* Actually walk over them.  This loop is the body of
         walk_trees, omitting the case where the TARGET_EXPR
         itself is handled.  */
      for (i = 0; i < len; ++i)
        {
          if (i == 2)
            ++id->in_target_cleanup_p;
          walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
                     id->tree_pruner);
          if (i == 2)
            --id->in_target_cleanup_p;
        }

      goto egress;
#endif
    }

  if (TYPE_P (t))
    /* Because types were not copied in copy_body, CALL_EXPRs beneath
       them should not be expanded.  This can happen if the type is a
       dynamic array type, for example.  */
    *walk_subtrees = 0;

  /* From here on, we're only interested in CALL_EXPRs.  */
  if (TREE_CODE (t) != CALL_EXPR)
    goto egress;

  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  fn = get_callee_fndecl (t);
  if (!fn)
    goto egress;

  /* Turn forward declarations into real ones.  */
  fn = cgraph_node (fn)->decl;

  /* If fn is a declaration of a function in a nested scope that was
     globally declared inline, we don't set its DECL_INITIAL.
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
     C++ front-end uses it for cdtors to refer to their internal
     declarations, that are not real functions.  Fortunately those
     don't have trees to be saved, so we can tell by checking their
     DECL_SAVED_TREE.  */
  if (! DECL_INITIAL (fn)
      && DECL_ABSTRACT_ORIGIN (fn)
      && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
    fn = DECL_ABSTRACT_ORIGIN (fn);

  /* Objective C and Fortran still call tree_rest_of_compilation directly.
     Kill this check once this is fixed.  */
  if (!id->current_node->analyzed)
    goto egress;

  edge = cgraph_edge (id->current_node, t);

  /* Constant propagation on arguments done during previous inlining
     may create a new direct call.  Produce an edge for it.  */
  if (!edge)
    {
      struct cgraph_node *dest = cgraph_node (fn);

      /* We have a missing edge in the callgraph.  This can happen in one
         case: previous inlining turned an indirect call into a direct call
         by constant propagating arguments.  In all other cases we hit a
         bug (incorrect node sharing is the most common reason for missing
         edges).  */
      if (!dest->needed)
        abort ();
      cgraph_create_edge (id->node, dest, t)->inline_failed
        = N_("originally indirect function call not considered for inlining");
      goto egress;
    }

  /* Don't try to inline functions that are not well-suited to
     inlining.  */
  if (!cgraph_inline_p (edge, &reason))
    {
      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        {
          sorry ("%Jinlining failed in call to '%F': %s", fn, fn, reason);
          sorry ("called from here");
        }
      else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
               && !DECL_IN_SYSTEM_HEADER (fn)
               && strlen (reason))
        {
          warning ("%Jinlining failed in call to '%F': %s", fn, fn, reason);
          warning ("called from here");
        }
      goto egress;
    }

#ifdef ENABLE_CHECKING
  if (edge->callee->decl != id->node->decl)
    verify_cgraph_node (edge->callee);
#endif

  if (! lang_hooks.tree_inlining.start_inlining (fn))
    goto egress;

  /* Build a block containing code to initialize the arguments, the
     actual inline expansion of the body, and a label for the return
     statements within the function to jump to.  The type of the
     statement expression is the return type of the function call.  */
  stmt = NULL;
  expr = build (BIND_EXPR, void_type_node, NULL_TREE,
                stmt, make_node (BLOCK));
  BLOCK_ABSTRACT_ORIGIN (BIND_EXPR_BLOCK (expr)) = fn;

  /* Local declarations will be replaced by their equivalents in this
     map.  */
  st = id->decl_map;
  id->decl_map = splay_tree_new (splay_tree_compare_pointers,
                                 NULL, NULL);

  /* Initialize the parameters.  */
  args = TREE_OPERAND (t, 1);
  return_slot_addr = NULL_TREE;
  if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
    {
      return_slot_addr = TREE_VALUE (args);
      args = TREE_CHAIN (args);
      TREE_TYPE (expr) = void_type_node;
    }

  arg_inits = initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2),
                                             fn, expr);
  if (arg_inits)
    {
      /* Expand any inlined calls in the initializers.  Do this before we
         push FN on the stack of functions we are inlining; we want to
         inline calls to FN that appear in the initializers for the
         parameters.

         Note we need to save and restore the saved tree statement iterator
         to avoid having it clobbered by expand_calls_inline.  */
      tree_stmt_iterator save_tsi;

      save_tsi = id->tsi;
      expand_calls_inline (&arg_inits, id);
      id->tsi = save_tsi;

      /* And add them to the tree.  */
      append_to_statement_list (arg_inits, &BIND_EXPR_BODY (expr));
    }

  /* Record the function we are about to inline so that we can avoid
     recursing into it.  */
  VARRAY_PUSH_TREE (id->fns, fn);

  /* Record the function we are about to inline if optimize_function
     has not been called on it yet and we don't have it in the list.  */
  if (! DECL_INLINED_FNS (fn))
    {
      int i;

      for (i = VARRAY_ACTIVE_SIZE (id->inlined_fns) - 1; i >= 0; i--)
        if (VARRAY_TREE (id->inlined_fns, i) == fn)
          break;
      if (i < 0)
        VARRAY_PUSH_TREE (id->inlined_fns, fn);
    }

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
  DECL_ARTIFICIAL (id->ret_label) = 1;
  DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
  insert_decl_map (id, id->ret_label, id->ret_label);

  if (! DECL_INITIAL (fn)
      || TREE_CODE (DECL_INITIAL (fn)) != BLOCK)
    abort ();

  /* Find the lhs to which the result of this call is assigned.  */
  modify_dest = tsi_stmt (id->tsi);
  if (TREE_CODE (modify_dest) == MODIFY_EXPR)
    modify_dest = TREE_OPERAND (modify_dest, 0);
  else
    modify_dest = NULL;

  /* Declare the return variable for the function.  */
  decl = declare_return_variable (id, return_slot_addr,
                                  modify_dest, &use_retvar);

  /* After we've initialized the parameters, we insert the body of the
     function itself.  */
  {
    struct cgraph_node *old_node = id->current_node;

    id->current_node = edge->callee;
    append_to_statement_list (copy_body (id), &BIND_EXPR_BODY (expr));
    id->current_node = old_node;
  }
  inlined_body = &BIND_EXPR_BODY (expr);

  /* After the body of the function comes the RET_LABEL.  This must come
     before we evaluate the returned value below, because that evaluation
     may cause RTL to be generated.  */
  if (TREE_USED (id->ret_label))
    {
      tree label = build1 (LABEL_EXPR, void_type_node, id->ret_label);
      append_to_statement_list (label, &BIND_EXPR_BODY (expr));
    }

  /* Clean up.  */
  splay_tree_delete (id->decl_map);
  id->decl_map = st;

  /* The new expression has side-effects if the old one did.  */
  TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);

  tsi_link_before (&id->tsi, expr, TSI_SAME_STMT);

  /* If the inlined function returns a result that we care about,
     then we're going to need to splice in a MODIFY_EXPR.  Otherwise
     the call was a standalone statement and we can just replace it
     with the BIND_EXPR inline representation of the called function.  */
  if (!use_retvar || !modify_dest)
    *tsi_stmt_ptr (id->tsi) = build_empty_stmt ();
  else
    *tp = use_retvar;

  /* When we gimplify a function call, we may clear TREE_SIDE_EFFECTS on
     the call if it is to a "const" function.  Thus the copy of
     TREE_SIDE_EFFECTS from the CALL_EXPR to the BIND_EXPR above will
     result in TREE_SIDE_EFFECTS not being set for the inlined copy of a
     "const" function.

     Unfortunately, that is wrong as inlining the function can create/expose
     interesting side effects (such as setting of a return value).

     The easiest solution is to simply recalculate TREE_SIDE_EFFECTS for
     the toplevel expression.  */
  recalculate_side_effects (expr);

  /* Update callgraph if needed.  */
  cgraph_remove_node (edge->callee);

  /* Recurse into the body of the just inlined function.  */
  expand_calls_inline (inlined_body, id);
  VARRAY_POP (id->fns);

  /* Don't walk into subtrees.  We've already handled them above.  */
  *walk_subtrees = 0;

  lang_hooks.tree_inlining.end_inlining (fn);

  /* Keep iterating.  */
 egress:
  input_location = saved_location;
  return NULL_TREE;
}
static void
expand_calls_inline (tree *stmt_p, inline_data *id)
{
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);
  int dummy;

  switch (code)
    {
    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        tree new;

        for (i = tsi_start (stmt); !tsi_end_p (i); )
          {
            id->tsi = i;
            expand_calls_inline (tsi_stmt_ptr (i), id);

            new = tsi_stmt (i);
            if (TREE_CODE (new) == STATEMENT_LIST)
              {
                tsi_link_before (&i, new, TSI_SAME_STMT);
                tsi_delink (&i);
              }
            else
              tsi_next (&i);
          }
      }
      break;

    case COND_EXPR:
      expand_calls_inline (&COND_EXPR_THEN (stmt), id);
      expand_calls_inline (&COND_EXPR_ELSE (stmt), id);
      break;

    case CATCH_EXPR:
      expand_calls_inline (&CATCH_BODY (stmt), id);
      break;

    case EH_FILTER_EXPR:
      expand_calls_inline (&EH_FILTER_FAILURE (stmt), id);
      break;

    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
      expand_calls_inline (&TREE_OPERAND (stmt, 0), id);
      expand_calls_inline (&TREE_OPERAND (stmt, 1), id);
      break;

    case BIND_EXPR:
      expand_calls_inline (&BIND_EXPR_BODY (stmt), id);
      break;

    case COMPOUND_EXPR:
      /* We're gimple.  We should have gotten rid of all these.  */
      abort ();

    case RETURN_EXPR:
      stmt_p = &TREE_OPERAND (stmt, 0);
      stmt = *stmt_p;
      if (!stmt || TREE_CODE (stmt) != MODIFY_EXPR)
        break;

      /* FALLTHRU */

    case MODIFY_EXPR:
      stmt_p = &TREE_OPERAND (stmt, 1);
      stmt = *stmt_p;
      if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
        {
          stmt_p = &TREE_OPERAND (stmt, 0);
          stmt = *stmt_p;
        }
      if (TREE_CODE (stmt) != CALL_EXPR)
        break;

      /* FALLTHRU */

    case CALL_EXPR:
      expand_call_inline (stmt_p, &dummy, id);
      break;

    default:
      break;
    }
}
/* Expand calls to inline functions in the body of FN.  */

void
optimize_inline_calls (tree fn)
{
  inline_data id;
  tree prev_fn;
  tree ifn;

  /* There is no point in performing inlining if errors have already
     occurred -- and we might crash if we try to inline invalid
     code.  */
  if (errorcount || sorrycount)
    return;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.current_node = id.node = cgraph_node (fn);
  /* Don't allow recursion into FN.  */
  VARRAY_TREE_INIT (id.fns, 32, "fns");
  VARRAY_PUSH_TREE (id.fns, fn);
  /* Or any functions that aren't finished yet.  */
  prev_fn = NULL_TREE;
  if (current_function_decl)
    {
      VARRAY_PUSH_TREE (id.fns, current_function_decl);
      prev_fn = current_function_decl;
    }

  prev_fn = lang_hooks.tree_inlining.add_pending_fn_decls (&id.fns, prev_fn);

  /* Create the list of functions this call will inline.  */
  VARRAY_TREE_INIT (id.inlined_fns, 32, "inlined_fns");

  /* Keep track of the low-water mark, i.e., the point where the first
     real inlining is represented in ID.FNS.  */
  id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);

  /* Replace all calls to inline functions with the bodies of those
     functions.  */
  id.tree_pruner = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
  expand_calls_inline (&DECL_SAVED_TREE (fn), &id);

  /* Clean up.  */
  htab_delete (id.tree_pruner);
  ifn = make_tree_vec (VARRAY_ACTIVE_SIZE (id.inlined_fns));
  if (VARRAY_ACTIVE_SIZE (id.inlined_fns))
    memcpy (&TREE_VEC_ELT (ifn, 0), &VARRAY_TREE (id.inlined_fns, 0),
            VARRAY_ACTIVE_SIZE (id.inlined_fns) * sizeof (tree));
  DECL_INLINED_FNS (fn) = ifn;

#ifdef ENABLE_CHECKING
  {
    struct cgraph_edge *e;

    verify_cgraph_node (id.node);

    /* Double check that we inlined everything we are supposed to inline.  */
    for (e = id.node->callees; e; e = e->next_callee)
      if (!e->inline_failed)
        abort ();
  }
#endif
}
/* FN is a function that has a complete body, and CLONE is a function whose
   body is to be set to a copy of FN, mapping argument declarations according
   to the ARG_MAP splay_tree.  */

void
clone_body (tree clone, tree fn, void *arg_map)
{
  inline_data id;

  /* Clone the body, as if we were making an inline call.  But, remap the
     parameters in the callee to the parameters of caller.  If there's an
     in-charge parameter, map it to an appropriate constant.  */
  memset (&id, 0, sizeof (id));
  VARRAY_TREE_INIT (id.fns, 2, "fns");
  VARRAY_PUSH_TREE (id.fns, clone);
  VARRAY_PUSH_TREE (id.fns, fn);
  id.decl_map = (splay_tree) arg_map;

  /* Cloning is treated slightly differently from inlining.  Set
     CLONING_P so that it's clear which operation we're performing.  */
  id.cloning_p = true;

  /* Actually copy the body.  */
  append_to_statement_list_force (copy_body (&id), &DECL_SAVED_TREE (clone));
}
1880 /* Save duplicate of body in FN. MAP is used to pass around splay tree
1881 used to update arguments in restore_body. */
1882 tree
1883 save_body (tree fn, tree *arg_copy)
1885 inline_data id;
1886 tree body, *parg;
1888 memset (&id, 0, sizeof (id));
1889 VARRAY_TREE_INIT (id.fns, 1, "fns");
1890 VARRAY_PUSH_TREE (id.fns, fn);
1891 id.node = cgraph_node (fn);
1892 id.saving_p = true;
1893 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
1894 *arg_copy = DECL_ARGUMENTS (fn);
1896 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
1898 tree new = copy_node (*parg);
1900 lang_hooks.dup_lang_specific_decl (new);
1901 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
1902 insert_decl_map (&id, *parg, new);
1903 TREE_CHAIN (new) = TREE_CHAIN (*parg);
1904 *parg = new;
1907 insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));
1909 /* Actually copy the body. */
1910 body = copy_body (&id);
1912 /* Clean up. */
1913 splay_tree_delete (id.decl_map);
1914 return body;
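/* Usage sketch (illustrative):

     tree saved_args;
     tree saved_body = save_body (fndecl, &saved_args);

   SAVED_BODY is an unshared copy of the body of `fndecl' (a
   hypothetical FUNCTION_DECL) and SAVED_ARGS is the matching copy of
   its PARM_DECL chain; each copied argument records the original as
   its DECL_ABSTRACT_ORIGIN.  */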
1917 #define WALK_SUBTREE(NODE) \
1918 do \
1920 result = walk_tree (&(NODE), func, data, htab); \
1921 if (result) \
1922 return result; \
1924 while (0)
1926 /* This is a subroutine of walk_tree that walks the fields of TYPE that
1927 are to be walked whenever a type is seen in the tree. The rest of the
1928 operands and the return value are as for walk_tree. */
1930 static tree
1931 walk_type_fields (tree type, walk_tree_fn func, void *data, void *htab)
1933 tree result = NULL_TREE;
1935 switch (TREE_CODE (type))
1937 case POINTER_TYPE:
1938 case REFERENCE_TYPE:
1939 /* We have to worry about mutually recursive pointers. These can't
1940 be written in C. They can in Ada. It's pathological, but
1941 there's an ACATS test (c38102a) that checks it. Deal with this
1942 by checking whether we point to another pointer, which in turn
1943 points to yet another pointer, which points to another one, and
1944 we have no htab. If so, get a hash table. We check three levels
1945 deep to avoid the cost of the hash table if we don't need one. */
1946 if (POINTER_TYPE_P (TREE_TYPE (type))
1947 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
1948 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
1949 && !htab)
1951 result = walk_tree_without_duplicates (&TREE_TYPE (type),
1952 func, data);
1953 if (result)
1954 return result;
1956 break;
1959 /* ... fall through ... */
1961 case COMPLEX_TYPE:
1962 WALK_SUBTREE (TREE_TYPE (type));
1963 break;
1965 case METHOD_TYPE:
1966 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
1968 /* Fall through. */
1970 case FUNCTION_TYPE:
1971 WALK_SUBTREE (TREE_TYPE (type));
1973 tree arg;
1975 /* We never want to walk into default arguments. */
1976 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
1977 WALK_SUBTREE (TREE_VALUE (arg));
1979 break;
1981 case ARRAY_TYPE:
1982 /* Don't follow this node's type if it is a pointer, for fear that
1983 we'll recurse infinitely. Those types are uninteresting anyway. */
1984 if (!POINTER_TYPE_P (TREE_TYPE (type))
1985 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE)
1986 WALK_SUBTREE (TREE_TYPE (type));
1987 WALK_SUBTREE (TYPE_DOMAIN (type));
1988 break;
1990 case BOOLEAN_TYPE:
1991 case ENUMERAL_TYPE:
1992 case INTEGER_TYPE:
1993 case CHAR_TYPE:
1994 case REAL_TYPE:
1995 WALK_SUBTREE (TYPE_MIN_VALUE (type));
1996 WALK_SUBTREE (TYPE_MAX_VALUE (type));
1997 break;
1999 case OFFSET_TYPE:
2000 WALK_SUBTREE (TREE_TYPE (type));
2001 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
2002 break;
2004 default:
2005 break;
2008 return NULL_TREE;
2011 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
2012 called with the DATA and the address of each sub-tree. If FUNC returns a
2013 non-NULL value, the traversal is aborted, and the value returned by FUNC
2014 is returned. If HTAB is non-NULL it is used to record the nodes visited,
2015 and to avoid visiting a node more than once. */
2017 tree
2018 walk_tree (tree *tp, walk_tree_fn func, void *data, void *htab_)
2020 htab_t htab = (htab_t) htab_;
2021 enum tree_code code;
2022 int walk_subtrees;
2023 tree result;
2025 #define WALK_SUBTREE_TAIL(NODE) \
2026 do \
2028 tp = & (NODE); \
2029 goto tail_recurse; \
2031 while (0)
2033 tail_recurse:
2034 /* Skip empty subtrees. */
2035 if (!*tp)
2036 return NULL_TREE;
2038 if (htab)
2040 void **slot;
2042 /* Don't walk the same tree twice, if the user has requested
2043 that we avoid doing so. */
2044 slot = htab_find_slot (htab, *tp, INSERT);
2045 if (*slot)
2046 return NULL_TREE;
2047 *slot = *tp;
2050 /* Call the function. */
2051 walk_subtrees = 1;
2052 result = (*func) (tp, &walk_subtrees, data);
2054 /* If we found something, return it. */
2055 if (result)
2056 return result;
2058 code = TREE_CODE (*tp);
2060 /* Even if we didn't, FUNC may have decided that there was nothing
2061 interesting below this point in the tree. */
2062 if (!walk_subtrees)
2064 if (code == TREE_LIST)
2065 /* But we still need to check our siblings. */
2066 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
2067 else
2068 return NULL_TREE;
2071 result = lang_hooks.tree_inlining.walk_subtrees (tp, &walk_subtrees, func,
2072 data, htab);
2073 if (result || ! walk_subtrees)
2074 return result;
2076 /* If this is a DECL_EXPR, walk into various fields of the type that it's
2077 defining. We only want to walk into these fields of a type in this
2078 case. Note that decls get walked as part of the processing of a
2079 BIND_EXPR.
2081 ??? Precisely which fields of types we are supposed to walk in
2082 this case, as opposed to the normal case, is not well defined. */
2083 if (code == DECL_EXPR
2084 && TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL
2085 && TREE_CODE (TREE_TYPE (DECL_EXPR_DECL (*tp))) != ERROR_MARK)
2087 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
2089 /* Call the function for the type. See if it returns anything or
2090 doesn't want us to continue. If we are to continue, walk both
2091 the normal fields and those for the declaration case. */
2092 result = (*func) (type_p, &walk_subtrees, data);
2093 if (result || !walk_subtrees)
2094 return result;
2096 result = walk_type_fields (*type_p, func, data, htab_);
2097 if (result)
2098 return result;
2100 WALK_SUBTREE (TYPE_SIZE (*type_p));
2101 WALK_SUBTREE (TYPE_SIZE_UNIT (*type_p));
2103 /* If this is a record type, also walk the fields. */
2104 if (TREE_CODE (*type_p) == RECORD_TYPE
2105 || TREE_CODE (*type_p) == UNION_TYPE
2106 || TREE_CODE (*type_p) == QUAL_UNION_TYPE)
2108 tree field;
2110 for (field = TYPE_FIELDS (*type_p); field;
2111 field = TREE_CHAIN (field))
2113 /* We'd like to look at the type of the field, but we can easily
2114 get infinite recursion. So assume it's pointed to elsewhere
2115 in the tree. Also, ignore things that aren't fields. */
2116 if (TREE_CODE (field) != FIELD_DECL)
2117 continue;
2119 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
2120 WALK_SUBTREE (DECL_SIZE (field));
2121 WALK_SUBTREE (DECL_SIZE_UNIT (field));
2122 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
2123 WALK_SUBTREE (DECL_QUALIFIER (field));
2128 else if (code != EXIT_BLOCK_EXPR
2129 && code != SAVE_EXPR
2130 && code != BIND_EXPR
2131 && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2133 int i, len;
2135 /* Walk over all the sub-trees of this operand. */
2136 len = first_rtl_op (code);
2137 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
2138 But we want to walk that operand only once. */
2139 if (code == TARGET_EXPR
2140 && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
2141 --len;
2143 /* Go through the subtrees. We need to do this in forward order so
2144 that the scope of a FOR_EXPR is handled properly. */
2145 #ifdef DEBUG_WALK_TREE
2146 for (i = 0; i < len; ++i)
2147 WALK_SUBTREE (TREE_OPERAND (*tp, i));
2148 #else
2149 for (i = 0; i < len - 1; ++i)
2150 WALK_SUBTREE (TREE_OPERAND (*tp, i));
2152 if (len)
2154 /* The common case is that we may tail recurse here. */
2155 if (code != BIND_EXPR
2156 && !TREE_CHAIN (*tp))
2157 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
2158 else
2159 WALK_SUBTREE (TREE_OPERAND (*tp, len - 1));
2161 #endif
2164 /* If this is a type, walk the needed fields in the type. */
2165 else if (TYPE_P (*tp))
2167 result = walk_type_fields (*tp, func, data, htab_);
2168 if (result)
2169 return result;
2171 else
2173 /* Not one of the easy cases. We must explicitly go through the
2174 children. */
2175 switch (code)
2177 case ERROR_MARK:
2178 case IDENTIFIER_NODE:
2179 case INTEGER_CST:
2180 case REAL_CST:
2181 case VECTOR_CST:
2182 case STRING_CST:
2183 case BLOCK:
2184 case PLACEHOLDER_EXPR:
2185 case SSA_NAME:
2186 case FIELD_DECL:
2187 case RESULT_DECL:
2188 /* None of these have subtrees other than those already walked
2189 above. */
2190 break;
2192 case TREE_LIST:
2193 WALK_SUBTREE (TREE_VALUE (*tp));
2194 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
2195 break;
2197 case TREE_VEC:
2199 int len = TREE_VEC_LENGTH (*tp);
2201 if (len == 0)
2202 break;
2204 /* Walk all elements but the first. */
2205 while (--len)
2206 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
2208 /* Now walk the first one as a tail call. */
2209 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
2212 case COMPLEX_CST:
2213 WALK_SUBTREE (TREE_REALPART (*tp));
2214 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
2216 case CONSTRUCTOR:
2217 WALK_SUBTREE_TAIL (CONSTRUCTOR_ELTS (*tp));
2219 case EXIT_BLOCK_EXPR:
2220 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 1));
2222 case SAVE_EXPR:
2223 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
2225 case BIND_EXPR:
2227 tree decl;
2228 for (decl = BIND_EXPR_VARS (*tp); decl; decl = TREE_CHAIN (decl))
2230 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
2231 into declarations that are just mentioned, rather than
2232 declared; they don't really belong to this part of the tree.
2233 And, we can see cycles: the initializer for a declaration
2234 can refer to the declaration itself. */
2235 WALK_SUBTREE (DECL_INITIAL (decl));
2236 WALK_SUBTREE (DECL_SIZE (decl));
2237 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
2239 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
2242 case STATEMENT_LIST:
2244 tree_stmt_iterator i;
2245 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
2246 WALK_SUBTREE (*tsi_stmt_ptr (i));
2248 break;
2250 default:
2251 /* ??? This could be a language-defined node. We really should make
2252 a hook for it, but right now just ignore it. */
2253 break;
2257 /* We didn't find what we were looking for. */
2258 return NULL_TREE;
2260 #undef WALK_SUBTREE
2261 #undef WALK_SUBTREE_TAIL
2264 /* Like walk_tree, but does not walk duplicate nodes more than once. */
2266 tree
2267 walk_tree_without_duplicates (tree *tp, walk_tree_fn func, void *data)
2269 tree result;
2270 htab_t htab;
2272 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
2273 result = walk_tree (tp, func, data, htab);
2274 htab_delete (htab);
2275 return result;
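/* Usage sketch (illustrative): a minimal walk_tree_fn callback that
   finds the first CALL_EXPR in a function body.  Returning a non-NULL
   tree stops the walk and is propagated to the caller; returning
   NULL_TREE keeps walking.  The names `find_call_r' and `fndecl' are
   hypothetical.

     static tree
     find_call_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                  void *data ATTRIBUTE_UNUSED)
     {
       if (TREE_CODE (*tp) == CALL_EXPR)
         return *tp;
       return NULL_TREE;
     }

     tree first_call
       = walk_tree_without_duplicates (&DECL_SAVED_TREE (fndecl),
                                       find_call_r, NULL);  */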
2278 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2280 tree
2281 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2283 enum tree_code code = TREE_CODE (*tp);
2285 /* We make copies of most nodes. */
2286 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
2287 || TREE_CODE_CLASS (code) == 'c'
2288 || code == TREE_LIST
2289 || code == TREE_VEC
2290 || code == TYPE_DECL)
2292 /* Because the chain gets clobbered when we make a copy, we save it
2293 here. */
2294 tree chain = TREE_CHAIN (*tp);
2295 tree new;
2297 /* Copy the node. */
2298 new = copy_node (*tp);
2300 /* Propagate mudflap marked-ness. */
2301 if (flag_mudflap && mf_marked_p (*tp))
2302 mf_mark (new);
2304 *tp = new;
2306 /* Now, restore the chain, if appropriate. That will cause
2307 walk_tree to walk into the chain as well. */
2308 if (code == PARM_DECL || code == TREE_LIST)
2309 TREE_CHAIN (*tp) = chain;
2311 /* For now, we don't update BLOCKs when we make copies. So we
2312 have to clear the BLOCK of any BIND_EXPR we copy. */
2313 if (TREE_CODE (*tp) == BIND_EXPR)
2314 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2317 else if (TREE_CODE_CLASS (code) == 't')
2318 *walk_subtrees = 0;
2319 else if (TREE_CODE_CLASS (code) == 'd')
2320 *walk_subtrees = 0;
2321 else if (code == STATEMENT_LIST)
2322 abort ();
2324 return NULL_TREE;
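/* Usage sketch (illustrative): copy_tree_r is meant to be passed to
   walk_tree, which then replaces each node it visits with a fresh copy
   as it descends:

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);

   Every expression, constant, TREE_LIST, TREE_VEC and TYPE_DECL node
   reached is copied, while types and other decls stay shared; since
   the callback aborts on STATEMENT_LISTs, this is only suitable for
   plain expressions.  `expr' is a hypothetical expression tree.  */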
2327 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2328 information indicating to what new SAVE_EXPR this one should be mapped,
2329 use that one. Otherwise, create a new node and enter it in ST. */
2331 void
2332 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2334 splay_tree st = (splay_tree) st_;
2335 splay_tree_node n;
2336 tree t;
2338 /* See if we already encountered this SAVE_EXPR. */
2339 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2341 /* If we didn't already remap this SAVE_EXPR, do so now. */
2342 if (!n)
2344 t = copy_node (*tp);
2346 /* Remember this SAVE_EXPR. */
2347 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2348 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2349 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2351 else
2353 /* We've already walked into this SAVE_EXPR; don't do it again. */
2354 *walk_subtrees = 0;
2355 t = (tree) n->value;
2358 /* Replace this SAVE_EXPR with the copy. */
2359 *tp = t;
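/* Usage sketch (illustrative): remap_save_expr is intended to be called
   from a walk_tree callback that threads one splay tree through the
   walk, so every occurrence of a given SAVE_EXPR is replaced by the
   same copy (compare unsave_r below).  The name `remap_saves_r' is
   hypothetical.

     static tree
     remap_saves_r (tree *tp, int *walk_subtrees, void *st)
     {
       if (TREE_CODE (*tp) == SAVE_EXPR)
         remap_save_expr (tp, st, walk_subtrees);
       return NULL_TREE;
     }  */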
2362 /* Called via walk_tree. If *TP is a LABEL_EXPR for a local label,
2363 copies the label declaration and enters it in the splay_tree in DATA
2364 (which is really an `inline_data *'). */
2366 static tree
2367 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
2368 void *data)
2370 inline_data *id = (inline_data *) data;
2372 /* Don't walk into types. */
2373 if (TYPE_P (*tp))
2374 *walk_subtrees = 0;
2376 else if (TREE_CODE (*tp) == LABEL_EXPR)
2378 tree decl = TREE_OPERAND (*tp, 0);
2380 /* Copy the decl and remember the copy. */
2381 insert_decl_map (id, decl,
2382 copy_decl_for_inlining (decl, DECL_CONTEXT (decl),
2383 DECL_CONTEXT (decl)));
2386 return NULL_TREE;
2389 /* Called via walk_tree when an expression is unsaved. Using the
2390 decl_map splay_tree in DATA (which is really an `inline_data *'),
2391 remaps all local declarations to appropriate replacements. */
2393 static tree
2394 unsave_r (tree *tp, int *walk_subtrees, void *data)
2396 inline_data *id = (inline_data *) data;
2397 splay_tree st = id->decl_map;
2398 splay_tree_node n;
2400 /* Only a local declaration (variable or label). */
2401 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
2402 || TREE_CODE (*tp) == LABEL_DECL)
2404 /* Lookup the declaration. */
2405 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2407 /* If it's there, remap it. */
2408 if (n)
2409 *tp = (tree) n->value;
2412 else if (TREE_CODE (*tp) == STATEMENT_LIST)
2413 copy_statement_list (tp);
2414 else if (TREE_CODE (*tp) == BIND_EXPR)
2415 copy_bind_expr (tp, walk_subtrees, id);
2416 else if (TREE_CODE (*tp) == SAVE_EXPR)
2417 remap_save_expr (tp, st, walk_subtrees);
2418 else
2420 copy_tree_r (tp, walk_subtrees, NULL);
2422 /* Do whatever unsaving is required. */
2423 unsave_expr_1 (*tp);
2426 /* Keep iterating. */
2427 return NULL_TREE;
2430 /* Default lang hook for "unsave_expr_now". Copies everything in EXPR and
2431 replaces variables, labels and SAVE_EXPRs local to EXPR. */
2433 tree
2434 lhd_unsave_expr_now (tree expr)
2436 inline_data id;
2438 /* There's nothing to do for NULL_TREE. */
2439 if (expr == 0)
2440 return expr;
2442 /* Set up ID. */
2443 memset (&id, 0, sizeof (id));
2444 VARRAY_TREE_INIT (id.fns, 1, "fns");
2445 VARRAY_PUSH_TREE (id.fns, current_function_decl);
2446 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2448 /* Walk the tree once to find local labels. */
2449 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
2451 /* Walk the tree again, copying, remapping, and unsaving. */
2452 walk_tree (&expr, unsave_r, &id, NULL);
2454 /* Clean up. */
2455 splay_tree_delete (id.decl_map);
2457 return expr;
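/* Usage sketch (illustrative): calling the default hook directly,

     tree unshared = lhd_unsave_expr_now (expr);

   yields a copy of `expr' (a hypothetical expression tree) in which
   local variables, labels and SAVE_EXPRs have been replaced by fresh
   equivalents, as described above.  */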
2460 /* Allow someone to determine from gdb whether SEARCH occurs as a subtree of TOP. */
2462 static tree
2463 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
2465 if (*tp == data)
2466 return (tree) data;
2467 else
2468 return NULL;
2471 bool
2472 debug_find_tree (tree top, tree search)
2474 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
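/* Usage sketch (illustrative): from gdb, with `top' and `search' bound
   to tree values of interest,

     (gdb) print debug_find_tree (top, search)

   evaluates to a nonzero value iff SEARCH occurs somewhere within TOP.  */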
2477 /* Declare the variables created by the inliner. Add all the variables in
2478 VARS to BIND_EXPR. */
2480 static void
2481 declare_inline_vars (tree bind_expr, tree vars)
2483 tree t;
2484 for (t = vars; t; t = TREE_CHAIN (t))
2485 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
2487 add_var_to_bind_expr (bind_expr, vars);