1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "varray.h"
36 #include "hashtab.h"
37 #include "langhooks.h"
38 #include "basic-block.h"
39 #include "tree-iterator.h"
40 #include "cgraph.h"
41 #include "intl.h"
42 #include "tree-mudflap.h"
43 #include "tree-flow.h"
44 #include "function.h"
45 #include "ggc.h"
47 #include "diagnostic.h"
48 #include "except.h"
49 #include "debug.h"
50 #include "pointer-set.h"
51 #include "ipa-prop.h"
52 #include "value-prof.h"
53 #include "tree-pass.h"
54 #include "target.h"
55 #include "integrate.h"
57 /* I'm not real happy about this, but we need to handle gimple and
58 non-gimple trees. */
59 #include "tree-gimple.h"
61 /* Inlining, Cloning, Versioning, Parallelization
63 Inlining: a function body is duplicated, but the PARM_DECLs are
64 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
65 GIMPLE_MODIFY_STMTs that store to a dedicated returned-value variable.
66 The duplicated eh_region info of the copy will later be appended
67 to the info for the caller; the eh_region info in copied throwing
68 statements and RESX_EXPRs is adjusted accordingly.
70 Cloning: (only in C++) We have one body for a con/de/structor, and
71 multiple function decls, each with a unique parameter list.
72 Duplicate the body, using the given splay tree; some parameters
73 will become constants (like 0 or 1).
75 Versioning: a function body is duplicated and the result is a new
76 function, rather than being inserted into blocks of an existing
77 function as with inlining. Some parameters will become constants.
79 Parallelization: a region of a function is duplicated resulting in
80 a new function. Variables may be replaced with complex expressions
81 to enable shared variable semantics.
83 All of these will simultaneously look up any callgraph edges. If
84 we're going to inline the duplicated function body, and the given
85 function has some cloned callgraph nodes (one for each place this
86 function will be inlined), those callgraph edges will be duplicated.
87 If we're cloning the body, those callgraph edges will be
88 updated to point into the new body. (Note that the original
89 callgraph node and edge list will not be altered.)
91 See the CALL_EXPR handling case in copy_body_r (). */
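/* To make the inlining transformation concrete, here is a sketch (not
   the output of any actual pass; the x.1 and retval.2 names are
   hypothetical):

     inline int add_one (int x) { return x + 1; }
     ...
     y = add_one (a);

   conceptually becomes

     x.1 = a;              <- PARM_DECL remapped to a VAR_DECL
     retval.2 = x.1 + 1;   <- RETURN_EXPR turned into a GIMPLE_MODIFY_STMT
     y = retval.2;

   with the callgraph edge for the call handled as described above.  */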
93 /* 0 if we should not perform inlining.
94 1 if we should expand function calls inline at the tree level.
95 2 if we should consider *all* functions to be inline
96 candidates. */
98 int flag_inline_trees = 0;
100 /* To Do:
102 o In order to make inlining-on-trees work, we pessimized
103 function-local static constants. In particular, they are now
104 always output, even when not addressed. Fix this by treating
105 function-local static constants just like global static
106 constants; the back-end already knows not to output them if they
107 are not needed.
109 o Provide heuristics to clamp inlining of recursive template
110 calls? */
113 /* Weights that estimate_num_insns uses for heuristics in inlining. */
115 eni_weights eni_inlining_weights;
117 /* Weights that estimate_num_insns uses to estimate the size of the
118 produced code. */
120 eni_weights eni_size_weights;
122 /* Weights that estimate_num_insns uses to estimate the time necessary
123 to execute the produced code. */
125 eni_weights eni_time_weights;
127 /* Prototypes. */
129 static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
130 static tree copy_generic_body (copy_body_data *);
131 static bool inlinable_function_p (tree);
132 static void remap_block (tree *, copy_body_data *);
133 static tree remap_decls (tree, copy_body_data *);
134 static void copy_bind_expr (tree *, int *, copy_body_data *);
135 static tree mark_local_for_remap_r (tree *, int *, void *);
136 static void unsave_expr_1 (tree);
137 static tree unsave_r (tree *, int *, void *);
138 static void declare_inline_vars (tree, tree);
139 static void remap_save_expr (tree *, void *, int *);
140 static void add_lexical_block (tree current_block, tree new_block);
141 static tree copy_decl_to_var (tree, copy_body_data *);
142 static tree copy_result_decl_to_var (tree, copy_body_data *);
143 static tree copy_decl_no_change (tree, copy_body_data *);
144 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
146 /* Insert a tree->tree mapping for ID. Although the name suggests
147 that the trees should be variables, it is used for more than that. */
149 void
150 insert_decl_map (copy_body_data *id, tree key, tree value)
152 *pointer_map_insert (id->decl_map, key) = value;
154 /* Always insert an identity map as well. If we see this same new
155 node again, we won't want to duplicate it a second time. */
156 if (key != value)
157 *pointer_map_insert (id->decl_map, value) = value;
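/* For example (a sketch): after insert_decl_map (id, old_var, new_var)
   the map contains both old_var -> new_var and new_var -> new_var, so a
   later remap_decl (new_var, id) finds the identity entry and returns
   new_var itself instead of making a second copy of the copy.  */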
160 /* Construct new SSA name for old NAME. ID is the inline context. */
162 static tree
163 remap_ssa_name (tree name, copy_body_data *id)
165 tree new;
166 tree *n;
168 gcc_assert (TREE_CODE (name) == SSA_NAME);
170 n = (tree *) pointer_map_contains (id->decl_map, name);
171 if (n)
172 return *n;
174 /* Do not set DEF_STMT yet, as the statement is not copied yet. We do
175 that in copy_bb. */
176 new = remap_decl (SSA_NAME_VAR (name), id);
177 /* We might've substituted a constant or another SSA_NAME for
178 the variable.
180 Replace the SSA name representing the RESULT_DECL by the variable
181 during inlining: this saves us from the need to introduce a PHI node
182 when the return value is only partly initialized. */
183 if ((TREE_CODE (new) == VAR_DECL || TREE_CODE (new) == PARM_DECL)
184 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
185 || !id->transform_return_to_modify))
187 new = make_ssa_name (new, NULL);
188 insert_decl_map (id, name, new);
189 if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
191 SSA_NAME_DEF_STMT (new) = build_empty_stmt ();
192 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name)) == name)
193 set_default_def (SSA_NAME_VAR (new), new);
195 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new)
196 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
197 TREE_TYPE (new) = TREE_TYPE (SSA_NAME_VAR (new));
199 else
200 insert_decl_map (id, name, new);
201 return new;
204 /* Remap DECL during the copying of the BLOCK tree for the function. */
206 tree
207 remap_decl (tree decl, copy_body_data *id)
209 tree *n;
210 tree fn;
212 /* We only remap local variables in the current function. */
213 fn = id->src_fn;
215 /* See if we have remapped this declaration. */
217 n = (tree *) pointer_map_contains (id->decl_map, decl);
219 /* If we didn't already have an equivalent for this declaration,
220 create one now. */
221 if (!n)
223 /* Make a copy of the variable or label. */
224 tree t = id->copy_decl (decl, id);
226 /* Remember it, so that if we encounter this local entity again
227 we can reuse this copy. Do this early because remap_type may
228 need this decl for TYPE_STUB_DECL. */
229 insert_decl_map (id, decl, t);
231 if (!DECL_P (t))
232 return t;
234 /* Remap types, if necessary. */
235 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
236 if (TREE_CODE (t) == TYPE_DECL)
237 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
239 /* Remap sizes as necessary. */
240 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
241 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
243 /* If fields, do likewise for offset and qualifier. */
244 if (TREE_CODE (t) == FIELD_DECL)
246 walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
247 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
248 walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
251 if (cfun && gimple_in_ssa_p (cfun)
252 && (TREE_CODE (t) == VAR_DECL
253 || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
255 tree def = gimple_default_def (id->src_cfun, decl);
256 get_var_ann (t);
257 if (TREE_CODE (decl) != PARM_DECL && def)
259 tree map = remap_ssa_name (def, id);
260 /* Watch out for RESULT_DECLs whose SSA names map directly
261 to them. */
262 if (TREE_CODE (map) == SSA_NAME)
263 set_default_def (t, map);
265 add_referenced_var (t);
267 return t;
270 return unshare_expr (*n);
273 static tree
274 remap_type_1 (tree type, copy_body_data *id)
276 tree new, t;
278 /* We do need a copy; build and register it now. If this is a pointer or
279 reference type, remap the designated type and make a new pointer or
280 reference type. */
281 if (TREE_CODE (type) == POINTER_TYPE)
283 new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
284 TYPE_MODE (type),
285 TYPE_REF_CAN_ALIAS_ALL (type));
286 insert_decl_map (id, type, new);
287 return new;
289 else if (TREE_CODE (type) == REFERENCE_TYPE)
291 new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
292 TYPE_MODE (type),
293 TYPE_REF_CAN_ALIAS_ALL (type));
294 insert_decl_map (id, type, new);
295 return new;
297 else
298 new = copy_node (type);
300 insert_decl_map (id, type, new);
302 /* This is a new type, not a copy of an old type. Need to reassociate
303 variants. We can handle everything except the main variant lazily. */
304 t = TYPE_MAIN_VARIANT (type);
305 if (type != t)
307 t = remap_type (t, id);
308 TYPE_MAIN_VARIANT (new) = t;
309 TYPE_NEXT_VARIANT (new) = TYPE_NEXT_VARIANT (t);
310 TYPE_NEXT_VARIANT (t) = new;
312 else
314 TYPE_MAIN_VARIANT (new) = new;
315 TYPE_NEXT_VARIANT (new) = NULL;
318 if (TYPE_STUB_DECL (type))
319 TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);
321 /* Lazily create pointer and reference types. */
322 TYPE_POINTER_TO (new) = NULL;
323 TYPE_REFERENCE_TO (new) = NULL;
325 switch (TREE_CODE (new))
327 case INTEGER_TYPE:
328 case REAL_TYPE:
329 case FIXED_POINT_TYPE:
330 case ENUMERAL_TYPE:
331 case BOOLEAN_TYPE:
332 t = TYPE_MIN_VALUE (new);
333 if (t && TREE_CODE (t) != INTEGER_CST)
334 walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
336 t = TYPE_MAX_VALUE (new);
337 if (t && TREE_CODE (t) != INTEGER_CST)
338 walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
339 return new;
341 case FUNCTION_TYPE:
342 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
343 walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
344 return new;
346 case ARRAY_TYPE:
347 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
348 TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
349 break;
351 case RECORD_TYPE:
352 case UNION_TYPE:
353 case QUAL_UNION_TYPE:
355 tree f, nf = NULL;
357 for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
359 t = remap_decl (f, id);
360 DECL_CONTEXT (t) = new;
361 TREE_CHAIN (t) = nf;
362 nf = t;
364 TYPE_FIELDS (new) = nreverse (nf);
366 break;
368 case OFFSET_TYPE:
369 default:
370 /* Shouldn't have been thought variable sized. */
371 gcc_unreachable ();
374 walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
375 walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
377 return new;
380 tree
381 remap_type (tree type, copy_body_data *id)
383 tree *node;
385 if (type == NULL)
386 return type;
388 /* See if we have remapped this type. */
389 node = (tree *) pointer_map_contains (id->decl_map, type);
390 if (node)
391 return *node;
393 /* The type only needs remapping if it's variably modified. */
394 if (! variably_modified_type_p (type, id->src_fn))
396 insert_decl_map (id, type, type);
397 return type;
400 return remap_type_1 (type, id);
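/* For instance (a sketch), only variably modified types such as

     void f (int n)
     {
       int a[n];        <- the array type's bounds refer to the local n
       ...
     }

   need remapping, so that the bounds in the copied body refer to the
   copy of n; an ordinary type like `int' is mapped to itself above.  */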
403 static tree
404 remap_decls (tree decls, copy_body_data *id)
406 tree old_var;
407 tree new_decls = NULL_TREE;
409 /* Remap its variables. */
410 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
412 tree new_var;
414 /* We cannot chain local static declarations into the copy's BLOCK_VARS,
415 as we can't duplicate them and must not break the one-decl rule.
416 Instead, link them into cfun's unexpanded_var_list. */
417 if (!auto_var_in_fn_p (old_var, id->src_fn)
418 && !DECL_EXTERNAL (old_var))
420 cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
421 cfun->unexpanded_var_list);
422 continue;
425 /* Remap the variable. */
426 new_var = remap_decl (old_var, id);
428 /* If we didn't remap this variable, we can't mess with its
429 TREE_CHAIN. If we remapped this variable to the return slot, it's
430 already declared somewhere else, so don't declare it here. */
431 if (!new_var || new_var == id->retvar)
433 else
435 gcc_assert (DECL_P (new_var));
436 TREE_CHAIN (new_var) = new_decls;
437 new_decls = new_var;
441 return nreverse (new_decls);
444 /* Copy the BLOCK to contain remapped versions of the variables
445 therein. And hook the new block into the block-tree. */
447 static void
448 remap_block (tree *block, copy_body_data *id)
450 tree old_block;
451 tree new_block;
452 tree fn;
454 /* Make the new block. */
455 old_block = *block;
456 new_block = make_node (BLOCK);
457 TREE_USED (new_block) = TREE_USED (old_block);
458 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
459 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
460 *block = new_block;
462 /* Remap its variables. */
463 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
465 fn = id->dst_fn;
467 if (id->transform_lang_insert_block)
468 lang_hooks.decls.insert_block (new_block);
470 /* Remember the remapped block. */
471 insert_decl_map (id, old_block, new_block);
474 /* Copy the whole block tree and root it in id->block. */
475 static tree
476 remap_blocks (tree block, copy_body_data *id)
478 tree t;
479 tree new = block;
481 if (!block)
482 return NULL;
484 remap_block (&new, id);
485 gcc_assert (new != block);
486 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
487 add_lexical_block (new, remap_blocks (t, id));
488 return new;
491 static void
492 copy_statement_list (tree *tp)
494 tree_stmt_iterator oi, ni;
495 tree new;
497 new = alloc_stmt_list ();
498 ni = tsi_start (new);
499 oi = tsi_start (*tp);
500 *tp = new;
502 for (; !tsi_end_p (oi); tsi_next (&oi))
503 tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
506 static void
507 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
509 tree block = BIND_EXPR_BLOCK (*tp);
510 /* Copy (and replace) the statement. */
511 copy_tree_r (tp, walk_subtrees, NULL);
512 if (block)
514 remap_block (&block, id);
515 BIND_EXPR_BLOCK (*tp) = block;
518 if (BIND_EXPR_VARS (*tp))
519 /* This will remap a lot of the same decls again, but this should be
520 harmless. */
521 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
524 /* Called from copy_body_id via walk_tree. DATA is really a
525 `copy_body_data *'. */
527 tree
528 copy_body_r (tree *tp, int *walk_subtrees, void *data)
530 copy_body_data *id = (copy_body_data *) data;
531 tree fn = id->src_fn;
532 tree new_block;
534 /* Begin by recognizing trees that we'll completely rewrite for the
535 inlining context. Our output for these trees is completely
536 different from our input (e.g. RETURN_EXPR is deleted and morphs
537 into an edge). Further down, we'll handle trees that get
538 duplicated and/or tweaked. */
540 /* When requested, RETURN_EXPRs should be transformed to just the
541 contained GIMPLE_MODIFY_STMT. The branch semantics of the return will
542 be handled elsewhere by manipulating the CFG rather than a statement. */
543 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
545 tree assignment = TREE_OPERAND (*tp, 0);
547 /* If we're returning something, just turn that into an
548 assignment into the equivalent of the original RESULT_DECL.
549 If the "assignment" is just the result decl, the result
550 decl has already been set (e.g. a recent "foo (&result_decl,
551 ...)"); just toss the entire RETURN_EXPR. */
552 if (assignment && TREE_CODE (assignment) == GIMPLE_MODIFY_STMT)
554 /* Replace the RETURN_EXPR with (a copy of) the
555 GIMPLE_MODIFY_STMT hanging underneath. */
556 *tp = copy_node (assignment);
558 else /* Else the RETURN_EXPR returns no value. */
560 *tp = NULL;
561 return (tree) (void *)1;
564 else if (TREE_CODE (*tp) == SSA_NAME)
566 *tp = remap_ssa_name (*tp, id);
567 *walk_subtrees = 0;
568 return NULL;
571 /* Local variables and labels need to be replaced by equivalent
572 variables. We don't want to copy static variables; there's only
573 one of those, no matter how many times we inline the containing
574 function. Similarly for globals from an outer function. */
575 else if (auto_var_in_fn_p (*tp, fn))
577 tree new_decl;
579 /* Remap the declaration. */
580 new_decl = remap_decl (*tp, id);
581 gcc_assert (new_decl);
582 /* Replace this variable with the copy. */
583 STRIP_TYPE_NOPS (new_decl);
584 *tp = new_decl;
585 *walk_subtrees = 0;
587 else if (TREE_CODE (*tp) == STATEMENT_LIST)
588 copy_statement_list (tp);
589 else if (TREE_CODE (*tp) == SAVE_EXPR)
590 remap_save_expr (tp, id->decl_map, walk_subtrees);
591 else if (TREE_CODE (*tp) == LABEL_DECL
592 && (! DECL_CONTEXT (*tp)
593 || decl_function_context (*tp) == id->src_fn))
594 /* These may need to be remapped for EH handling. */
595 *tp = remap_decl (*tp, id);
596 else if (TREE_CODE (*tp) == BIND_EXPR)
597 copy_bind_expr (tp, walk_subtrees, id);
598 /* Types may need remapping as well. */
599 else if (TYPE_P (*tp))
600 *tp = remap_type (*tp, id);
602 /* If this is a constant, we have to copy the node iff the type will be
603 remapped. copy_tree_r will not copy a constant. */
604 else if (CONSTANT_CLASS_P (*tp))
606 tree new_type = remap_type (TREE_TYPE (*tp), id);
608 if (new_type == TREE_TYPE (*tp))
609 *walk_subtrees = 0;
611 else if (TREE_CODE (*tp) == INTEGER_CST)
612 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
613 TREE_INT_CST_HIGH (*tp));
614 else
616 *tp = copy_node (*tp);
617 TREE_TYPE (*tp) = new_type;
621 /* Otherwise, just copy the node. Note that copy_tree_r already
622 knows not to copy VAR_DECLs, etc., so this is safe. */
623 else
625 /* Here we handle trees that are not completely rewritten.
626 First we detect some inlining-induced bogosities for
627 discarding. */
628 if (TREE_CODE (*tp) == GIMPLE_MODIFY_STMT
629 && GIMPLE_STMT_OPERAND (*tp, 0) == GIMPLE_STMT_OPERAND (*tp, 1)
630 && (auto_var_in_fn_p (GIMPLE_STMT_OPERAND (*tp, 0), fn)))
632 /* Some assignments VAR = VAR; don't generate any rtl code
633 and thus don't count as variable modification. Avoid
634 keeping bogosities like 0 = 0. */
635 tree decl = GIMPLE_STMT_OPERAND (*tp, 0), value;
636 tree *n;
638 n = (tree *) pointer_map_contains (id->decl_map, decl);
639 if (n)
641 value = *n;
642 STRIP_TYPE_NOPS (value);
643 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
645 *tp = build_empty_stmt ();
646 return copy_body_r (tp, walk_subtrees, data);
650 else if (TREE_CODE (*tp) == INDIRECT_REF)
652 /* Get rid of *& from inline substitutions that can happen when a
653 pointer argument is an ADDR_EXPR. */
654 tree decl = TREE_OPERAND (*tp, 0);
655 tree *n;
657 n = (tree *) pointer_map_contains (id->decl_map, decl);
658 if (n)
660 tree new;
661 tree old;
662 /* If we happen to get an ADDR_EXPR in n->value, strip
663 it manually here as we'll eventually get ADDR_EXPRs
664 which lie about their types pointed to. In this case
665 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
666 but we absolutely rely on that. As fold_indirect_ref
667 does other useful transformations, try that first, though. */
668 tree type = TREE_TYPE (TREE_TYPE (*n));
669 new = unshare_expr (*n);
670 old = *tp;
671 *tp = fold_indirect_ref_1 (type, new);
672 if (! *tp)
674 if (TREE_CODE (new) == ADDR_EXPR)
675 *tp = TREE_OPERAND (new, 0);
676 else
678 *tp = build1 (INDIRECT_REF, type, new);
679 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
682 *walk_subtrees = 0;
683 return NULL;
687 /* Here is the "usual case". Copy this tree node, and then
688 tweak some special cases. */
689 copy_tree_r (tp, walk_subtrees, NULL);
691 /* Global variables we haven't seen yet need to go into referenced
692 vars. */
693 if (gimple_in_ssa_p (cfun) && TREE_CODE (*tp) == VAR_DECL)
694 add_referenced_var (*tp);
696 /* If EXPR has a block defined, map it to the newly constructed block.
697 When inlining we want EXPRs without a block to appear in the block
698 of the function call. */
699 if (EXPR_P (*tp) || GIMPLE_STMT_P (*tp))
701 new_block = id->block;
702 if (TREE_BLOCK (*tp))
704 tree *n;
705 n = (tree *) pointer_map_contains (id->decl_map,
706 TREE_BLOCK (*tp));
707 gcc_assert (n);
708 new_block = *n;
710 TREE_BLOCK (*tp) = new_block;
713 if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
714 TREE_OPERAND (*tp, 0) =
715 build_int_cst
716 (NULL_TREE,
717 id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));
719 if (!GIMPLE_TUPLE_P (*tp) && TREE_CODE (*tp) != OMP_CLAUSE)
720 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
722 /* The copied TARGET_EXPR has never been expanded, even if the
723 original node was expanded already. */
724 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
726 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
727 TREE_OPERAND (*tp, 3) = NULL_TREE;
730 /* Variable substitution need not be simple; consider, in particular,
731 the INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
732 and friends are up-to-date. */
733 else if (TREE_CODE (*tp) == ADDR_EXPR)
735 int invariant = TREE_INVARIANT (*tp);
736 walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
737 /* Handle the case where we substituted an INDIRECT_REF
738 into the operand of the ADDR_EXPR. */
739 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
740 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
741 else
742 recompute_tree_invariant_for_addr_expr (*tp);
743 /* If this used to be invariant, but is not any longer,
744 then regimplification is probably needed. */
745 if (invariant && !TREE_INVARIANT (*tp))
746 id->regimplify = true;
747 *walk_subtrees = 0;
751 /* Keep iterating. */
752 return NULL_TREE;
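/* As an example of the INDIRECT_REF case above (a sketch): when

     void g (int *p) { *p = 1; }

   is inlined for the call g (&x), the map contains p -> &x, so the
   copied `*p' would read `*&x'; the fold_indirect_ref_1 logic then
   folds it back to plain `x'.  */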
755 /* Copy a basic block, scaling its profile accordingly. Edges will be
756 taken care of later. */
758 static basic_block
759 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, int count_scale)
761 block_stmt_iterator bsi, copy_bsi;
762 basic_block copy_basic_block;
764 /* create_basic_block() will append every new block to
765 basic_block_info automatically. */
766 copy_basic_block = create_basic_block (NULL, (void *) 0,
767 (basic_block) bb->prev_bb->aux);
768 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
770 /* We are going to rebuild frequencies from scratch. These values are of
771 only minor importance, serving to drive canonicalize_loop_headers. */
772 copy_basic_block->frequency = ((gcov_type)bb->frequency
773 * frequency_scale / REG_BR_PROB_BASE);
774 if (copy_basic_block->frequency > BB_FREQ_MAX)
775 copy_basic_block->frequency = BB_FREQ_MAX;
776 copy_bsi = bsi_start (copy_basic_block);
778 for (bsi = bsi_start (bb);
779 !bsi_end_p (bsi); bsi_next (&bsi))
781 tree stmt = bsi_stmt (bsi);
782 tree orig_stmt = stmt;
784 id->regimplify = false;
785 walk_tree (&stmt, copy_body_r, id, NULL);
787 /* A RETURN_EXPR might be removed; this is signalled by the stmt
788 pointer becoming NULL. */
789 if (stmt)
791 tree call, decl;
793 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
795 /* With return slot optimization we can end up with
796 non-gimple (foo *)&this->m, fix that here. */
797 if ((TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
798 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
799 && !is_gimple_val (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0)))
800 || id->regimplify)
801 gimplify_stmt (&stmt);
803 bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
805 /* Process the new statement. gimplify_stmt may have turned the
806 statement into multiple statements; we need to process all of them. */
807 while (!bsi_end_p (copy_bsi))
809 tree *stmtp = bsi_stmt_ptr (copy_bsi);
810 tree stmt = *stmtp;
811 call = get_call_expr_in (stmt);
813 if (call && CALL_EXPR_VA_ARG_PACK (call) && id->call_expr)
815 /* __builtin_va_arg_pack () should be replaced by
816 all arguments corresponding to ... in the caller. */
817 tree p, *argarray, new_call, *call_ptr;
818 int nargs = call_expr_nargs (id->call_expr);
820 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
821 nargs--;
823 argarray = (tree *) alloca ((nargs + call_expr_nargs (call))
824 * sizeof (tree));
826 memcpy (argarray, CALL_EXPR_ARGP (call),
827 call_expr_nargs (call) * sizeof (*argarray));
828 memcpy (argarray + call_expr_nargs (call),
829 CALL_EXPR_ARGP (id->call_expr)
830 + (call_expr_nargs (id->call_expr) - nargs),
831 nargs * sizeof (*argarray));
833 new_call = build_call_array (TREE_TYPE (call),
834 CALL_EXPR_FN (call),
835 nargs + call_expr_nargs (call),
836 argarray);
837 /* Copy all CALL_EXPR flags, locus and block, except
838 CALL_EXPR_VA_ARG_PACK flag. */
839 CALL_EXPR_STATIC_CHAIN (new_call)
840 = CALL_EXPR_STATIC_CHAIN (call);
841 CALL_EXPR_TAILCALL (new_call) = CALL_EXPR_TAILCALL (call);
842 CALL_EXPR_RETURN_SLOT_OPT (new_call)
843 = CALL_EXPR_RETURN_SLOT_OPT (call);
844 CALL_FROM_THUNK_P (new_call) = CALL_FROM_THUNK_P (call);
845 CALL_CANNOT_INLINE_P (new_call)
846 = CALL_CANNOT_INLINE_P (call);
847 TREE_NOTHROW (new_call) = TREE_NOTHROW (call);
848 SET_EXPR_LOCUS (new_call, EXPR_LOCUS (call));
849 TREE_BLOCK (new_call) = TREE_BLOCK (call);
851 call_ptr = stmtp;
852 if (TREE_CODE (*call_ptr) == GIMPLE_MODIFY_STMT)
853 call_ptr = &GIMPLE_STMT_OPERAND (*call_ptr, 1);
854 if (TREE_CODE (*call_ptr) == WITH_SIZE_EXPR)
855 call_ptr = &TREE_OPERAND (*call_ptr, 0);
856 gcc_assert (*call_ptr == call);
857 if (call_ptr == stmtp)
859 bsi_replace (&copy_bsi, new_call, true);
860 stmtp = bsi_stmt_ptr (copy_bsi);
861 stmt = *stmtp;
863 else
865 *call_ptr = new_call;
866 stmt = *stmtp;
867 update_stmt (stmt);
870 else if (call
871 && id->call_expr
872 && (decl = get_callee_fndecl (call))
873 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
874 && DECL_FUNCTION_CODE (decl)
875 == BUILT_IN_VA_ARG_PACK_LEN)
877 /* __builtin_va_arg_pack_len () should be replaced by
878 the number of anonymous arguments. */
879 int nargs = call_expr_nargs (id->call_expr);
880 tree count, *call_ptr, p;
882 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
883 nargs--;
885 count = build_int_cst (integer_type_node, nargs);
886 call_ptr = stmtp;
887 if (TREE_CODE (*call_ptr) == GIMPLE_MODIFY_STMT)
888 call_ptr = &GIMPLE_STMT_OPERAND (*call_ptr, 1);
889 if (TREE_CODE (*call_ptr) == WITH_SIZE_EXPR)
890 call_ptr = &TREE_OPERAND (*call_ptr, 0);
891 gcc_assert (*call_ptr == call && call_ptr != stmtp);
892 *call_ptr = count;
893 stmt = *stmtp;
894 update_stmt (stmt);
895 call = NULL_TREE;
898 /* Statements produced by inlining can be unfolded, especially
899 when we constant propagated some operands. We can't fold
900 them right now for two reasons:
901 1) folding requires SSA_NAME_DEF_STMTs to be correct
902 2) we can't change function calls to builtins.
903 So we just mark the statement for later folding. We mark
904 all new statements, instead of just the statements changed
905 by some nontrivial substitution, so that even statements made
906 foldable indirectly are updated. If this turns out to be
907 expensive, copy_body can be told to watch for nontrivial
908 changes. */
909 if (id->statements_to_fold)
910 pointer_set_insert (id->statements_to_fold, stmt);
911 /* We're duplicating a CALL_EXPR. Find any corresponding
912 callgraph edges and update or duplicate them. */
913 if (call && (decl = get_callee_fndecl (call)))
915 struct cgraph_node *node;
916 struct cgraph_edge *edge;
918 switch (id->transform_call_graph_edges)
920 case CB_CGE_DUPLICATE:
921 edge = cgraph_edge (id->src_node, orig_stmt);
922 if (edge)
923 cgraph_clone_edge (edge, id->dst_node, stmt,
924 REG_BR_PROB_BASE, 1, edge->frequency, true);
925 break;
927 case CB_CGE_MOVE_CLONES:
928 for (node = id->dst_node->next_clone;
929 node;
930 node = node->next_clone)
932 edge = cgraph_edge (node, orig_stmt);
933 gcc_assert (edge);
934 cgraph_set_call_stmt (edge, stmt);
936 /* FALLTHRU */
938 case CB_CGE_MOVE:
939 edge = cgraph_edge (id->dst_node, orig_stmt);
940 if (edge)
941 cgraph_set_call_stmt (edge, stmt);
942 break;
944 default:
945 gcc_unreachable ();
948 /* If you think we can abort here, you are wrong.
949 There is no region 0 in tree land. */
950 gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt)
951 != 0);
953 if (tree_could_throw_p (stmt)
954 /* When we are cloning for inlining, we are supposed to
955 construct a clone that calls precisely the same functions
956 as the original. However, IPA optimizers might earlier have
957 proved some function calls to be non-trapping, which might
958 render some basic blocks dead and unreachable.
961 We can't update SSA with unreachable blocks in the CFG, and thus
962 we prevent that scenario by preserving even the "dead" EH
963 edges until the point they are later removed by the
964 fixup_cfg pass. */
965 || (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
966 && lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) > 0))
968 int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);
969 /* Add an entry for the copied tree in the EH hashtable.
970 When cloning or versioning, use the hashtable in
971 cfun, and just copy the EH number. When inlining, use the
972 hashtable in the caller, and adjust the region number. */
973 if (region > 0)
974 add_stmt_to_eh_region (stmt, region + id->eh_region_offset);
976 /* If this tree doesn't have a region associated with it,
977 and there is a "current region,"
978 then associate this tree with the current region
979 and add edges associated with this region. */
980 if ((lookup_stmt_eh_region_fn (id->src_cfun,
981 orig_stmt) <= 0
982 && id->eh_region > 0)
983 && tree_could_throw_p (stmt))
984 add_stmt_to_eh_region (stmt, id->eh_region);
986 if (gimple_in_ssa_p (cfun))
988 ssa_op_iter i;
989 tree def;
991 find_new_referenced_vars (bsi_stmt_ptr (copy_bsi));
992 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
993 if (TREE_CODE (def) == SSA_NAME)
994 SSA_NAME_DEF_STMT (def) = stmt;
996 bsi_next (&copy_bsi);
998 copy_bsi = bsi_last (copy_basic_block);
1001 return copy_basic_block;
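/* The __builtin_va_arg_pack handling above supports forwarders such as
   (a sketch; myfprintf is a hypothetical name, assuming <stdio.h>):

     static inline __attribute__ ((always_inline)) int
     myfprintf (FILE *f, const char *fmt, ...)
     {
       return fprintf (f, fmt, __builtin_va_arg_pack ());
     }

   When myfprintf (stdout, "%d", i) is inlined, __builtin_va_arg_pack ()
   is replaced by the caller's anonymous argument i, yielding
   fprintf (stdout, "%d", i), and __builtin_va_arg_pack_len () would be
   replaced by the count of such arguments (1 here).  */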
1004 /* Inserting a Single Entry Multiple Exit region in SSA form into code
1005 in SSA form is quite easy, since the dominator relationship for the
1006 old basic blocks does not change.
1008 There is, however, an exception: inlining might change the dominator
1009 relation across EH edges from basic blocks within the inlined function
1010 to landing pads in the function we inline into.
1012 The function fills in the PHI_RESULTs of such PHI nodes if they refer
1013 to gimple regs. Otherwise, it marks the PHI_RESULTs of such
1014 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1015 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1016 set, and this means that there will be no overlapping live ranges
1017 for the underlying symbol.
1019 This might change in the future if we allow redirecting of EH edges,
1020 and we might then want to change the way we build the CFG pre-inlining
1021 to include all the possible edges. */
1022 static void
1023 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1024 bool can_throw, bool nonlocal_goto)
1026 edge e;
1027 edge_iterator ei;
1029 FOR_EACH_EDGE (e, ei, bb->succs)
1030 if (!e->dest->aux
1031 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1033 tree phi;
1035 gcc_assert (e->flags & EDGE_ABNORMAL);
1036 if (!nonlocal_goto)
1037 gcc_assert (e->flags & EDGE_EH);
1038 if (!can_throw)
1039 gcc_assert (!(e->flags & EDGE_EH));
1040 for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
1042 edge re;
1044 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1045 gcc_assert (!e->dest->aux);
1047 gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
1048 (PHI_RESULT (phi)));
1050 if (!is_gimple_reg (PHI_RESULT (phi)))
1052 mark_sym_for_renaming
1053 (SSA_NAME_VAR (PHI_RESULT (phi)));
1054 continue;
1057 re = find_edge (ret_bb, e->dest);
1058 gcc_assert (re);
1059 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1060 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1062 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1063 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
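/* Sketch of the situation handled above: an abnormal (EH) edge from a
   copied block reaches a landing pad that still has PHI nodes with an
   argument for RET_BB:

     copied BB --EH--> LP: x_3 = PHI <x_1 (RET_BB), ...>

   For gimple regs, the argument for the new edge E is copied from the
   corresponding edge RE out of RET_BB; otherwise the underlying symbol
   is merely queued for renaming.  */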
1068 /* Copy edges from BB into its copy constructed earlier, scaling the
1069 profile accordingly. Edges will be taken care of later. Assume the
1070 aux pointers point to the copies of each BB. */
1071 static void
1072 copy_edges_for_bb (basic_block bb, int count_scale, basic_block ret_bb)
1074 basic_block new_bb = (basic_block) bb->aux;
1075 edge_iterator ei;
1076 edge old_edge;
1077 block_stmt_iterator bsi;
1078 int flags;
1080 /* Use the indices from the original blocks to create edges for the
1081 new ones. */
1082 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1083 if (!(old_edge->flags & EDGE_EH))
1085 edge new;
1087 flags = old_edge->flags;
1089 /* Return edges do get a FALLTHRU flag when they get inlined. */
1090 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1091 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1092 flags |= EDGE_FALLTHRU;
1093 new = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1094 new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1095 new->probability = old_edge->probability;
1098 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1099 return;
1101 for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
1103 tree copy_stmt;
1104 bool can_throw, nonlocal_goto;
1106 copy_stmt = bsi_stmt (bsi);
1107 update_stmt (copy_stmt);
1108 if (gimple_in_ssa_p (cfun))
1109 mark_symbols_for_renaming (copy_stmt);
1110 /* Do this before the possible split_block. */
1111 bsi_next (&bsi);
1113 /* If this tree could throw an exception, there are two
1114 cases where we need to add abnormal edge(s): the
1115 tree wasn't in a region and there is a "current
1116 region" in the caller; or the original tree had
1117 EH edges. In both cases split the block after the tree,
1118 and add abnormal edge(s) as needed; we need both
1119 those from the callee and the caller.
1120 We check whether the copy can throw, because the const
1121 propagation can change an INDIRECT_REF which throws
1122 into a COMPONENT_REF which doesn't. If the copy
1123 can throw, the original could also throw. */
1125 can_throw = tree_can_throw_internal (copy_stmt);
1126 nonlocal_goto = tree_can_make_abnormal_goto (copy_stmt);
1128 if (can_throw || nonlocal_goto)
1130 if (!bsi_end_p (bsi))
1131 /* Note that bb's predecessor edges aren't necessarily
1132 right at this point; split_block doesn't care. */
1134 edge e = split_block (new_bb, copy_stmt);
1136 new_bb = e->dest;
1137 new_bb->aux = e->src->aux;
1138 bsi = bsi_start (new_bb);
1142 if (can_throw)
1143 make_eh_edges (copy_stmt);
1145 if (nonlocal_goto)
1146 make_abnormal_goto_edges (bb_for_stmt (copy_stmt), true);
1148 if ((can_throw || nonlocal_goto)
1149 && gimple_in_ssa_p (cfun))
1150 update_ssa_across_abnormal_edges (bb_for_stmt (copy_stmt), ret_bb,
1151 can_throw, nonlocal_goto);
1155 /* Copy the PHIs. All blocks and edges have been copied; some blocks
1156 were possibly split and new outgoing EH edges inserted.
1157 BB points to the block of the original function and AUX pointers link
1158 the original and newly copied blocks. */
1160 static void
1161 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1163 basic_block new_bb = bb->aux;
1164 edge_iterator ei;
1165 tree phi;
1167 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
1169 tree res = PHI_RESULT (phi);
1170 tree new_res = res;
1171 tree new_phi;
1172 edge new_edge;
1174 if (is_gimple_reg (res))
1176 walk_tree (&new_res, copy_body_r, id, NULL);
1177 SSA_NAME_DEF_STMT (new_res)
1178 = new_phi = create_phi_node (new_res, new_bb);
1179 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1181 edge old_edge = find_edge (new_edge->src->aux, bb);
1182 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1183 tree new_arg = arg;
1185 walk_tree (&new_arg, copy_body_r, id, NULL);
1186 gcc_assert (new_arg);
1187 /* With return slot optimization we can end up with
1188 non-gimple (foo *)&this->m, fix that here. */
1189 if (TREE_CODE (new_arg) != SSA_NAME
1190 && TREE_CODE (new_arg) != FUNCTION_DECL
1191 && !is_gimple_val (new_arg))
1193 tree stmts = NULL_TREE;
1194 new_arg = force_gimple_operand (new_arg, &stmts,
1195 true, NULL);
1196 bsi_insert_on_edge_immediate (new_edge, stmts);
1198 add_phi_arg (new_phi, new_arg, new_edge);
1204 /* Wrapper for remap_decl so it can be used as a callback. */
1205 static tree
1206 remap_decl_1 (tree decl, void *data)
1208 return remap_decl (decl, (copy_body_data *) data);
1211 /* Build the struct function and associated datastructures for the new
1212 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1214 static void
1215 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
1216 int frequency)
1218 struct function *new_cfun
1219 = (struct function *) ggc_alloc_cleared (sizeof (struct function));
1220 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1221 int count_scale, frequency_scale;
1223 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1224 count_scale = (REG_BR_PROB_BASE * count
1225 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1226 else
1227 count_scale = 1;
1229 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1230 frequency_scale = (REG_BR_PROB_BASE * frequency
1231 /
1232 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1233 else
1234 frequency_scale = count_scale;
1236 /* Register specific tree functions. */
1237 tree_register_cfg_hooks ();
1238 *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
1239 new_cfun->funcdef_no = get_next_funcdef_no ();
1240 VALUE_HISTOGRAMS (new_cfun) = NULL;
1241 new_cfun->unexpanded_var_list = NULL;
1242 new_cfun->cfg = NULL;
1243 new_cfun->decl = new_fndecl /*= copy_node (callee_fndecl)*/;
1244 DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
1245 push_cfun (new_cfun);
1246 init_empty_tree_cfg ();
1248 ENTRY_BLOCK_PTR->count =
1249 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1250 REG_BR_PROB_BASE);
1251 ENTRY_BLOCK_PTR->frequency =
1252 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1253 frequency_scale / REG_BR_PROB_BASE);
1254 EXIT_BLOCK_PTR->count =
1255 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1256 REG_BR_PROB_BASE);
1257 EXIT_BLOCK_PTR->frequency =
1258 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1259 frequency_scale / REG_BR_PROB_BASE);
1260 if (src_cfun->eh)
1261 init_eh_for_function ();
1263 if (src_cfun->gimple_df)
1265 init_tree_ssa ();
1266 cfun->gimple_df->in_ssa_p = true;
1267 init_ssa_operands ();
1269 pop_cfun ();
1272 /* Make a copy of the body of FN so that it can be inserted inline in
1273 another function. Walks FN via CFG, returns new fndecl. */
1275 static tree
1276 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
1277 basic_block entry_block_map, basic_block exit_block_map)
1279 tree callee_fndecl = id->src_fn;
1280 /* Original cfun for the callee, doesn't change. */
1281 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1282 struct function *cfun_to_copy;
1283 basic_block bb;
1284 tree new_fndecl = NULL;
1285 int count_scale, frequency_scale;
1286 int last;
1288 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1289 count_scale = (REG_BR_PROB_BASE * count
1290 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1291 else
1292 count_scale = 1;
1294 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1295 frequency_scale = (REG_BR_PROB_BASE * frequency
1296 /
1297 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1298 else
1299 frequency_scale = count_scale;
1301 /* Register specific tree functions. */
1302 tree_register_cfg_hooks ();
1304 /* Must have a CFG here at this point. */
1305 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
1306 (DECL_STRUCT_FUNCTION (callee_fndecl)));
1308 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1311 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
1312 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
1313 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
1314 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
1316 /* Duplicate any exception-handling regions. */
1317 if (cfun->eh)
1319 id->eh_region_offset
1320 = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
1321 0, id->eh_region);
1323 /* Use the aux pointers to map the original blocks to their copies. */
1324 FOR_EACH_BB_FN (bb, cfun_to_copy)
1326 basic_block new = copy_bb (id, bb, frequency_scale, count_scale);
1327 bb->aux = new;
1328 new->aux = bb;
1331 last = last_basic_block;
1332 /* Now that we've duplicated the blocks, duplicate their edges. */
1333 FOR_ALL_BB_FN (bb, cfun_to_copy)
1334 copy_edges_for_bb (bb, count_scale, exit_block_map);
1335 if (gimple_in_ssa_p (cfun))
1336 FOR_ALL_BB_FN (bb, cfun_to_copy)
1337 copy_phis_for_bb (bb, id);
1338 FOR_ALL_BB_FN (bb, cfun_to_copy)
1340 ((basic_block)bb->aux)->aux = NULL;
1341 bb->aux = NULL;
1343 /* Zero out the AUX fields of blocks newly created during EH edge
1344 insertion. */
1345 for (; last < last_basic_block; last++)
1346 BASIC_BLOCK (last)->aux = NULL;
1347 entry_block_map->aux = NULL;
1348 exit_block_map->aux = NULL;
1350 return new_fndecl;
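/* In short, the aux trick used above (a sketch):

     bb->aux = copy;  copy->aux = bb;    while copying blocks

   lets copy_edges_for_bb and copy_phis_for_bb translate an original
   block to its copy (and back) in constant time; all aux fields are
   cleared again before returning.  */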
1353 /* Make a copy of the body of FN so that it can be inserted inline in
1354 another function. */
1356 static tree
1357 copy_generic_body (copy_body_data *id)
1359 tree body;
1360 tree fndecl = id->src_fn;
1362 body = DECL_SAVED_TREE (fndecl);
1363 walk_tree (&body, copy_body_r, id, NULL);
1365 return body;
1368 static tree
1369 copy_body (copy_body_data *id, gcov_type count, int frequency,
1370 basic_block entry_block_map, basic_block exit_block_map)
1372 tree fndecl = id->src_fn;
1373 tree body;
1375 /* If this body has a CFG, walk CFG and copy. */
1376 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
1377 body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);
1379 return body;
1382 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
1383 defined in function FN, or of a data member thereof. */
1385 static bool
1386 self_inlining_addr_expr (tree value, tree fn)
1388 tree var;
1390 if (TREE_CODE (value) != ADDR_EXPR)
1391 return false;
1393 var = get_base_address (TREE_OPERAND (value, 0));
1395 return var && auto_var_in_fn_p (var, fn);
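/* E.g. (a sketch): when f calls itself and an argument is &local, with
   local defined in f, propagating the ADDR_EXPR into the inlined copy
   would make it name the caller's instance of local rather than the
   copy's; this predicate lets setup_one_parameter refuse that.  */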
1398 static void
1399 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
1400 basic_block bb, tree *vars)
1402 tree init_stmt;
1403 tree var;
1404 tree var_sub;
1405 tree rhs = value;
1406 tree def = (gimple_in_ssa_p (cfun)
1407 ? gimple_default_def (id->src_cfun, p) : NULL);
1409 if (value
1410 && value != error_mark_node
1411 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
1412 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
1414 /* If the parameter is never assigned to and has no SSA_NAMEs created,
1415 we may not need to create a new variable here at all. Instead, we may
1416 be able to just use the argument value. */
1417 if (TREE_READONLY (p)
1418 && !TREE_ADDRESSABLE (p)
1419 && value && !TREE_SIDE_EFFECTS (value)
1420 && !def)
1422 /* We may produce non-gimple trees by adding NOPs or introduce
1423 invalid sharing when the operand is not really constant.
1424 It is not a big deal to prohibit constant propagation here, as
1425 we will constant-propagate in the DOM1 pass anyway. */
1426 if (is_gimple_min_invariant (value)
1427 && useless_type_conversion_p (TREE_TYPE (p),
1428 TREE_TYPE (value))
1429 /* We have to be very careful about ADDR_EXPR. Make sure
1430 the base variable isn't a local variable of the inlined
1431 function, e.g., when doing recursive inlining, direct or
1432 mutually-recursive or whatever, which is why we don't
1433 just test whether fn == current_function_decl. */
1434 && ! self_inlining_addr_expr (value, fn))
1436 insert_decl_map (id, p, value);
1437 return;
1441 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
1442 here since the type of this decl must be visible to the calling
1443 function. */
1444 var = copy_decl_to_var (p, id);
1445 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
1447 get_var_ann (var);
1448 add_referenced_var (var);
1451 /* See if the frontend wants to pass this by invisible reference. If
1452 so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
1453 replace uses of the PARM_DECL with dereferences. */
1454 if (TREE_TYPE (var) != TREE_TYPE (p)
1455 && POINTER_TYPE_P (TREE_TYPE (var))
1456 && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
1458 insert_decl_map (id, var, var);
1459 var_sub = build_fold_indirect_ref (var);
1461 else
1462 var_sub = var;
1464 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
1465 that way, when the PARM_DECL is encountered, it will be
1466 automatically replaced by the VAR_DECL. */
1467 insert_decl_map (id, p, var_sub);
1469 /* Declare this new variable. */
1470 TREE_CHAIN (var) = *vars;
1471 *vars = var;
1473 /* Make gimplifier happy about this variable. */
1474 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
1476 /* Even if P was TREE_READONLY, the new VAR should not be.
1477 In the original code, we would have constructed a
1478 temporary, and then the function body would have never
1479 changed the value of P. However, now, we will be
1480 constructing VAR directly. The constructor body may
1481 change its value multiple times as it is being
1482 constructed. Therefore, it must not be TREE_READONLY;
1483 the back-end assumes that a TREE_READONLY variable is
1484 assigned to only once. */
1485 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
1486 TREE_READONLY (var) = 0;
1488 /* If there is no setup required and we are in SSA, take the easy route
1489 of replacing all SSA names representing the function parameter by the
1490 SSA name passed to the function.
1492 We need to construct a map for the variable anyway, as it might be
1493 used in different SSA names when the parameter is set in the function.
1495 FIXME: This usually kills the last connection between the inlined
1496 function parameter and the actual value in debug info. Can we do
1497 better here? If we just inserted the statement, copy propagation
1498 would kill it anyway, as it always did in older versions of GCC.
1500 We might want to introduce a notion that a single SSA_NAME might
1501 represent multiple variables for purposes of debugging. */
1502 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
1503 && (TREE_CODE (rhs) == SSA_NAME
1504 || is_gimple_min_invariant (rhs))
1505 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1507 insert_decl_map (id, def, rhs);
1508 return;
1511 /* Initialize this VAR_DECL from the equivalent argument. Convert
1512 the argument to the proper type in case it was promoted. */
1513 if (value)
1515 block_stmt_iterator bsi = bsi_last (bb);
1517 if (rhs == error_mark_node)
1519 insert_decl_map (id, p, var_sub);
1520 return;
1523 STRIP_USELESS_TYPE_CONVERSION (rhs);
1525 /* We want to use GIMPLE_MODIFY_STMT, not INIT_EXPR here so that we
1526 keep our trees in gimple form. */
1527 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
1529 def = remap_ssa_name (def, id);
1530 init_stmt = build_gimple_modify_stmt (def, rhs);
1531 SSA_NAME_DEF_STMT (def) = init_stmt;
1532 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
1533 set_default_def (var, NULL);
1535 else
1536 init_stmt = build_gimple_modify_stmt (var, rhs);
1538 /* If we did not create a gimple value and we did not create a gimple
1539 cast of a gimple value, then we will need to gimplify INIT_STMTS
1540 at the end. Note that is_gimple_cast only checks the outer
1541 tree code, not its operand. Thus the explicit check that its
1542 operand is a gimple value. */
1543 if ((!is_gimple_val (rhs)
1544 && (!is_gimple_cast (rhs)
1545 || !is_gimple_val (TREE_OPERAND (rhs, 0))))
1546 || !is_gimple_reg (var))
1548 tree_stmt_iterator i;
1550 push_gimplify_context ();
1551 gimplify_stmt (&init_stmt);
1552 if (gimple_in_ssa_p (cfun)
1553 && init_stmt && TREE_CODE (init_stmt) == STATEMENT_LIST)
1555 /* The replacement can expose previously unreferenced
1556 variables. */
1557 for (i = tsi_start (init_stmt); !tsi_end_p (i); tsi_next (&i))
1558 find_new_referenced_vars (tsi_stmt_ptr (i));
1560 pop_gimplify_context (NULL);
1563 /* If VAR represents a zero-sized variable, it's possible that the
1564 assignment statement may result in no gimple statements. */
1565 if (init_stmt)
1566 bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
1567 if (gimple_in_ssa_p (cfun))
1568 for (;!bsi_end_p (bsi); bsi_next (&bsi))
1569 mark_symbols_for_renaming (bsi_stmt (bsi));
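/* To illustrate (a sketch; x.1 is a hypothetical name): for

     static inline int sq (int x) { return x * x; }
     ... sq (i + 1) ...

   the argument is not an invariant, so a local copy is declared and
   initialized, roughly `x.1 = i + 1;', and the body's uses of x are
   remapped to x.1. For sq (4), the invariant cases above can instead
   record the mapping x -> 4 and emit no initialization at all.  */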
1573 /* Generate code to initialize the parameters of the function at the
1574 top of the stack in ID from the CALL_EXPR EXP. */
1576 static void
1577 initialize_inlined_parameters (copy_body_data *id, tree exp,
1578 tree fn, basic_block bb)
1580 tree parms;
1581 tree a;
1582 tree p;
1583 tree vars = NULL_TREE;
1584 call_expr_arg_iterator iter;
1585 tree static_chain = CALL_EXPR_STATIC_CHAIN (exp);
1587 /* Figure out what the parameters are. */
1588 parms = DECL_ARGUMENTS (fn);
1590 /* Loop through the parameter declarations, replacing each with an
1591 equivalent VAR_DECL, appropriately initialized. */
1592 for (p = parms, a = first_call_expr_arg (exp, &iter); p;
1593 a = next_call_expr_arg (&iter), p = TREE_CHAIN (p))
1594 setup_one_parameter (id, p, a, fn, bb, &vars);
1596 /* Initialize the static chain. */
1597 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
1598 gcc_assert (fn != current_function_decl);
1599 if (p)
1601 /* No static chain? Seems like a bug in tree-nested.c. */
1602 gcc_assert (static_chain);
1604 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
1607 declare_inline_vars (id->block, vars);
1610 /* Declare a return variable to replace the RESULT_DECL for the
1611 function we are calling. An appropriate DECL_STMT is returned.
1612 The USE_STMT is filled to contain a use of the declaration to
1613 indicate the return value of the function.
1615 RETURN_SLOT, if non-null, is the place where the result is to be stored.
1616 It is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
1617 was the LHS of the GIMPLE_MODIFY_STMT to which this call is the RHS.
1619 The return value is a (possibly null) value that is the result of the
1620 function as seen by the callee. *USE_P is a (possibly null) value that
1621 holds the result as seen by the caller. */
1623 static tree
1624 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
1625 tree *use_p)
1627 tree callee = id->src_fn;
1628 tree caller = id->dst_fn;
1629 tree result = DECL_RESULT (callee);
1630 tree callee_type = TREE_TYPE (result);
1631 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
1632 tree var, use;
1634 /* We don't need to do anything for functions that don't return
1635 anything. */
1636 if (!result || VOID_TYPE_P (callee_type))
1638 *use_p = NULL_TREE;
1639 return NULL_TREE;
1642 /* If there was a return slot, then the return value is the
1643 dereferenced address of that object. */
1644 if (return_slot)
1646 /* The front end shouldn't have used both return_slot and
1647 a modify expression. */
1648 gcc_assert (!modify_dest);
1649 if (DECL_BY_REFERENCE (result))
1651 tree return_slot_addr = build_fold_addr_expr (return_slot);
1652 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
1654 /* We are going to construct *&return_slot and we can't do that
1655 for variables believed to be non-addressable.
1657 FIXME: This check can possibly match, because values returned
1658 via the return slot optimization are not believed to have their
1659 address taken by alias analysis. */
1660 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
1661 if (gimple_in_ssa_p (cfun))
1663 HOST_WIDE_INT bitsize;
1664 HOST_WIDE_INT bitpos;
1665 tree offset;
1666 enum machine_mode mode;
1667 int unsignedp;
1668 int volatilep;
1669 tree base;
1670 base = get_inner_reference (return_slot, &bitsize, &bitpos,
1671 &offset,
1672 &mode, &unsignedp, &volatilep,
1673 false);
1674 if (TREE_CODE (base) == INDIRECT_REF)
1675 base = TREE_OPERAND (base, 0);
1676 if (TREE_CODE (base) == SSA_NAME)
1677 base = SSA_NAME_VAR (base);
1678 mark_sym_for_renaming (base);
1680 var = return_slot_addr;
1682 else
1684 var = return_slot;
1685 gcc_assert (TREE_CODE (var) != SSA_NAME);
1687 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1688 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1689 && !DECL_GIMPLE_REG_P (result)
1690 && DECL_P (var))
1691 DECL_GIMPLE_REG_P (var) = 0;
1692 use = NULL;
1693 goto done;
1696 /* All types requiring non-trivial constructors should have been handled. */
1697 gcc_assert (!TREE_ADDRESSABLE (callee_type));
1699 /* Attempt to avoid creating a new temporary variable. */
1700 if (modify_dest
1701 && TREE_CODE (modify_dest) != SSA_NAME)
1703 bool use_it = false;
1705 /* We can't use MODIFY_DEST if there's type promotion involved. */
1706 if (!useless_type_conversion_p (callee_type, caller_type))
1707 use_it = false;
1709 /* ??? If we're assigning to a variable sized type, then we must
1710 reuse the destination variable, because we've no good way to
1711 create variable sized temporaries at this point. */
1712 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
1713 use_it = true;
1715 /* If the callee cannot possibly modify MODIFY_DEST, then we can
1716 reuse it as the result of the call directly. Don't do this if
1717 it would promote MODIFY_DEST to addressable. */
1718 else if (TREE_ADDRESSABLE (result))
1719 use_it = false;
1720 else
1722 tree base_m = get_base_address (modify_dest);
1724 /* If the base isn't a decl, then it's a pointer, and we don't
1725 know where that's going to go. */
1726 if (!DECL_P (base_m))
1727 use_it = false;
1728 else if (is_global_var (base_m))
1729 use_it = false;
1730 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1731 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1732 && !DECL_GIMPLE_REG_P (result)
1733 && DECL_GIMPLE_REG_P (base_m))
1734 use_it = false;
1735 else if (!TREE_ADDRESSABLE (base_m))
1736 use_it = true;
1739 if (use_it)
1741 var = modify_dest;
1742 use = NULL;
1743 goto done;
1747 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
1749 var = copy_result_decl_to_var (result, id);
1750 if (gimple_in_ssa_p (cfun))
1752 get_var_ann (var);
1753 add_referenced_var (var);
1756 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
1757 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
1758 = tree_cons (NULL_TREE, var,
1759 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
1761 /* Do not have the rest of GCC warn about this variable as it should
1762 not be visible to the user. */
1763 TREE_NO_WARNING (var) = 1;
1765 declare_inline_vars (id->block, var);
1767 /* Build the use expr. If the return type of the function was
1768 promoted, convert it back to the expected type. */
1769 use = var;
1770 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
1771 use = fold_convert (caller_type, var);
1773 STRIP_USELESS_TYPE_CONVERSION (use);
1775 if (DECL_BY_REFERENCE (result))
1776 var = build_fold_addr_expr (var);
1778 done:
1779 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
1780 way, when the RESULT_DECL is encountered, it will be
1781 automatically replaced by the VAR_DECL. */
1782 insert_decl_map (id, result, var);
1784 /* Remember this so we can ignore it in remap_decls. */
1785 id->retvar = var;
1787 *use_p = use;
1788 return var;
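/* For example (a sketch): for `s = foo ();' where foo returns a struct,
   MODIFY_DEST is s; if the callee cannot clobber it harmfully (the
   checks above), s itself is reused as the return variable and no
   temporary is created. With CALL_EXPR_RETURN_SLOT_OPT, RETURN_SLOT
   names the destination object and the callee's RESULT_DECL is mapped
   to the slot (or to its address when the result is
   DECL_BY_REFERENCE).  */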
1791 /* Returns nonzero if a function can be inlined as a tree. */
1793 bool
1794 tree_inlinable_function_p (tree fn)
1796 return inlinable_function_p (fn);
1799 static const char *inline_forbidden_reason;
1801 static tree
1802 inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
1803 void *fnp)
1805 tree node = *nodep;
1806 tree fn = (tree) fnp;
1807 tree t;
1809 switch (TREE_CODE (node))
1811 case CALL_EXPR:
1812 /* Refuse to inline an alloca call unless the user explicitly forced it,
1813 as this may change the program's memory overhead drastically when the
1814 function using alloca is called in a loop. In the GCC present in
1815 SPEC2000, inlining into schedule_block caused it to require 2GB of
1816 RAM instead of 256MB. */
1817 if (alloca_call_p (node)
1818 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1820 inline_forbidden_reason
1821 = G_("function %q+F can never be inlined because it uses "
1822 "alloca (override using the always_inline attribute)");
1823 return node;
1825 t = get_callee_fndecl (node);
1826 if (! t)
1827 break;
1829 /* We cannot inline functions that call setjmp. */
1830 if (setjmp_call_p (t))
1832 inline_forbidden_reason
1833 = G_("function %q+F can never be inlined because it uses setjmp");
1834 return node;
1837 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
1838 switch (DECL_FUNCTION_CODE (t))
1840 /* We cannot inline functions that take a variable number of
1841 arguments. */
1842 case BUILT_IN_VA_START:
1843 case BUILT_IN_STDARG_START:
1844 case BUILT_IN_NEXT_ARG:
1845 case BUILT_IN_VA_END:
1846 inline_forbidden_reason
1847 = G_("function %q+F can never be inlined because it "
1848 "uses variable argument lists");
1849 return node;
1851 case BUILT_IN_LONGJMP:
1852 /* We can't inline functions that call __builtin_longjmp at
1853 all. The non-local goto machinery really requires the
1854 destination be in a different function. If we allow the
1855 function calling __builtin_longjmp to be inlined into the
1856 function calling __builtin_setjmp, Things will Go Awry. */
1857 inline_forbidden_reason
1858 = G_("function %q+F can never be inlined because "
1859 "it uses setjmp-longjmp exception handling");
1860 return node;
1862 case BUILT_IN_NONLOCAL_GOTO:
1863 /* Similarly. */
1864 inline_forbidden_reason
1865 = G_("function %q+F can never be inlined because "
1866 "it uses non-local goto");
1867 return node;
1869 case BUILT_IN_RETURN:
1870 case BUILT_IN_APPLY_ARGS:
1871 /* If a __builtin_apply_args caller would be inlined,
1872 it would be saving arguments of the function it has
1873 been inlined into. Similarly, __builtin_return would return
1874 from the function into which the caller was inlined. */
1875 inline_forbidden_reason
1876 = G_("function %q+F can never be inlined because "
1877 "it uses __builtin_return or __builtin_apply_args");
1878 return node;
1880 default:
1881 break;
1883 break;
1885 case GOTO_EXPR:
1886 t = TREE_OPERAND (node, 0);
1888 /* We will not inline a function which uses computed goto. The
1889 addresses of its local labels, which may be tucked into
1890 global storage, are of course not constant across
1891 instantiations, which causes unexpected behavior. */
1892 if (TREE_CODE (t) != LABEL_DECL)
1894 inline_forbidden_reason
1895 = G_("function %q+F can never be inlined "
1896 "because it contains a computed goto");
1897 return node;
1899 break;
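      /* For illustration (a hypothetical example, not from this file):

           void f (int i)
           {
             static void *tbl[] = { &&a, &&b };
             goto *tbl[i];
           a: return;
           b: return;
           }

         contains a GOTO_EXPR whose operand is a computed address rather
         than a LABEL_DECL, so the function is rejected with the reason
         above.  */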
1901 case LABEL_EXPR:
1902 t = TREE_OPERAND (node, 0);
1903 if (DECL_NONLOCAL (t))
1905 /* We cannot inline a function that receives a non-local goto
1906 because we cannot remap the destination label used in the
1907 function that is performing the non-local goto. */
1908 inline_forbidden_reason
1909 = G_("function %q+F can never be inlined "
1910 "because it receives a non-local goto");
1911 return node;
1913 break;
1915 case RECORD_TYPE:
1916 case UNION_TYPE:
1917 /* We cannot inline a function of the form
1919 void F (int i) { struct S { int ar[i]; } s; }
1921 Attempting to do so produces a catch-22.
1922 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1923 UNION_TYPE nodes, then it goes into infinite recursion on a
1924 structure containing a pointer to its own type. If it doesn't,
1925 then the type node for S doesn't get adjusted properly when
1926 F is inlined.
1928 ??? This is likely no longer true, but it's too late in the 4.0
1929 cycle to try to find out. This should be checked for 4.1. */
1930 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1931 if (variably_modified_type_p (TREE_TYPE (t), NULL))
1933 inline_forbidden_reason
1934 = G_("function %q+F can never be inlined "
1935 "because it uses variable sized variables");
1936 return node;
1939 default:
1940 break;
1943 return NULL_TREE;
1946 /* Return a subexpression that makes FNDECL uninlinable (e.g. an alloca call), if any. */
1947 static tree
1948 inline_forbidden_p (tree fndecl)
1950 location_t saved_loc = input_location;
1951 block_stmt_iterator bsi;
1952 basic_block bb;
1953 tree ret = NULL_TREE;
1955 FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
1956 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1958 ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
1959 inline_forbidden_p_1, fndecl);
1960 if (ret)
1961 goto egress;
1964 egress:
1965 input_location = saved_loc;
1966 return ret;
1969 /* Returns true if FN is a function that does not have any
1970 fundamental inline-blocking properties. */
1972 static bool
1973 inlinable_function_p (tree fn)
1975 bool inlinable = true;
1976 bool do_warning;
1977 tree always_inline;
1979 /* If we've already decided this function shouldn't be inlined,
1980 there's no need to check again. */
1981 if (DECL_UNINLINABLE (fn))
1982 return false;
1984 /* We only warn for functions declared `inline' by the user. */
1985 do_warning = (warn_inline
1986 && DECL_INLINE (fn)
1987 && DECL_DECLARED_INLINE_P (fn)
1988 && !DECL_IN_SYSTEM_HEADER (fn));
1990 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
1992 if (flag_really_no_inline
1993 && always_inline == NULL)
1995 if (do_warning)
1996 warning (OPT_Winline, "function %q+F can never be inlined because it "
1997 "is suppressed using -fno-inline", fn);
1998 inlinable = false;
2001 /* Don't auto-inline anything that might not be bound within
2002 this unit of translation. */
2003 else if (!DECL_DECLARED_INLINE_P (fn)
2004 && DECL_REPLACEABLE_P (fn))
2005 inlinable = false;
2007 else if (!function_attribute_inlinable_p (fn))
2009 if (do_warning)
2010 warning (OPT_Winline, "function %q+F can never be inlined because it "
2011 "uses attributes conflicting with inlining", fn);
2012 inlinable = false;
2015 /* If we don't have the function body available, we can't inline it.
2016 However, this should not be recorded since we also get here for
2017 forward declared inline functions. Therefore, return at once. */
2018 if (!DECL_SAVED_TREE (fn))
2019 return false;
2021 /* If we're not inlining at all, then we cannot inline this function. */
2022 else if (!flag_inline_trees)
2023 inlinable = false;
2025 /* Only try to inline functions if DECL_INLINE is set. This should be
2026 true for all functions declared `inline', and for all other functions
2027 as well with -finline-functions.
2029 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
2030 it's the front-end that must set DECL_INLINE in this case, because
2031 dwarf2out loses if a function that does not have DECL_INLINE set is
2032 inlined anyway. That is why we have both DECL_INLINE and
2033 DECL_DECLARED_INLINE_P. */
2034 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
2035 here should be redundant. */
2036 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
2037 inlinable = false;
2039 else if (inline_forbidden_p (fn))
2041 /* See if we should warn about uninlinable functions. Previously,
2042 some of these warnings would be issued while trying to expand
2043 the function inline, but that would cause multiple warnings
2044 about functions that would for example call alloca. But since
2045 this is a property of the function, just one warning is enough.
2046 As a bonus we can now give more details about the reason why a
2047 function is not inlinable. */
2048 if (always_inline)
2049 sorry (inline_forbidden_reason, fn);
2050 else if (do_warning)
2051 warning (OPT_Winline, inline_forbidden_reason, fn);
2053 inlinable = false;
2056 /* Squirrel away the result so that we don't have to check again. */
2057 DECL_UNINLINABLE (fn) = !inlinable;
2059 return inlinable;
2062 /* Estimate the cost of a memory move. Use the machine-dependent
2063 word size and take a possible memcpy call into account. */
2065 static int
2066 estimate_move_cost (tree type)
2068 HOST_WIDE_INT size;
2070 size = int_size_in_bytes (type);
2072 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
2073 /* Cost of a memcpy call, 3 arguments and the call. */
2074 return 4;
2075 else
2076 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
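/* Worked example (assuming, hypothetically, MOVE_MAX_PIECES == 8 and
   MOVE_RATIO == 4): moving a 20-byte type costs (20 + 8 - 1) / 8 == 3,
   while a 40-byte type exceeds the 8 * 4 == 32 byte threshold and is
   charged the flat memcpy cost of 4.  */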
2079 /* Arguments for estimate_num_insns_1. */
2081 struct eni_data
2083 /* Used to return the number of insns. */
2084 int count;
2086 /* Weights of various constructs. */
2087 eni_weights *weights;
2090 /* Used by estimate_num_insns. Estimate the number of instructions
2091 contributed by a given statement. */
2093 static tree
2094 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
2096 struct eni_data *d = data;
2097 tree x = *tp;
2098 unsigned cost;
2100 if (IS_TYPE_OR_DECL_P (x))
2102 *walk_subtrees = 0;
2103 return NULL;
2105 /* Assume that constants and references cost nothing. They should
2106 be dominated by the operations that use them, which we count later,
2107 and they are common targets of CSE and similar optimizations. */
2108 else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
2109 return NULL;
2111 switch (TREE_CODE (x))
2113 /* Containers have no cost. */
2114 case TREE_LIST:
2115 case TREE_VEC:
2116 case BLOCK:
2117 case COMPONENT_REF:
2118 case BIT_FIELD_REF:
2119 case INDIRECT_REF:
2120 case ALIGN_INDIRECT_REF:
2121 case MISALIGNED_INDIRECT_REF:
2122 case ARRAY_REF:
2123 case ARRAY_RANGE_REF:
2124 case OBJ_TYPE_REF:
2125 case EXC_PTR_EXPR: /* ??? */
2126 case FILTER_EXPR: /* ??? */
2127 case COMPOUND_EXPR:
2128 case BIND_EXPR:
2129 case WITH_CLEANUP_EXPR:
2130 case NOP_EXPR:
2131 case CONVERT_EXPR:
2132 case VIEW_CONVERT_EXPR:
2133 case SAVE_EXPR:
2134 case ADDR_EXPR:
2135 case COMPLEX_EXPR:
2136 case RANGE_EXPR:
2137 case CASE_LABEL_EXPR:
2138 case SSA_NAME:
2139 case CATCH_EXPR:
2140 case EH_FILTER_EXPR:
2141 case STATEMENT_LIST:
2142 case ERROR_MARK:
2143 case NON_LVALUE_EXPR:
2144 case FDESC_EXPR:
2145 case VA_ARG_EXPR:
2146 case TRY_CATCH_EXPR:
2147 case TRY_FINALLY_EXPR:
2148 case LABEL_EXPR:
2149 case GOTO_EXPR:
2150 case RETURN_EXPR:
2151 case EXIT_EXPR:
2152 case LOOP_EXPR:
2153 case PHI_NODE:
2154 case WITH_SIZE_EXPR:
2155 case OMP_CLAUSE:
2156 case OMP_RETURN:
2157 case OMP_CONTINUE:
2158 case OMP_SECTIONS_SWITCH:
2159 case OMP_ATOMIC_STORE:
2160 break;
2162 /* We don't account for constants for now. Assume that their cost is
2163 amortized by the operations that use them. We may reconsider this
2164 decision once we are able to optimize the tree before estimating its
2165 size and break out static initializers. */
2166 case IDENTIFIER_NODE:
2167 case INTEGER_CST:
2168 case REAL_CST:
2169 case FIXED_CST:
2170 case COMPLEX_CST:
2171 case VECTOR_CST:
2172 case STRING_CST:
2173 *walk_subtrees = 0;
2174 return NULL;
2176 /* CHANGE_DYNAMIC_TYPE_EXPR explicitly expands to nothing. */
2177 case CHANGE_DYNAMIC_TYPE_EXPR:
2178 *walk_subtrees = 0;
2179 return NULL;
2181 /* Try to estimate the cost of assignments. We have three cases to
2182 deal with:
2183 1) Simple assignments to registers;
2184 2) Stores to things that must live in memory. This includes
2185 "normal" stores to scalars, but also assignments of large
2186 structures, or constructors of big arrays;
2187 3) TARGET_EXPRs.
2189 Let us look at the first two cases, assuming we have "a = b + C":
2190 <GIMPLE_MODIFY_STMT <var_decl "a">
2191 <plus_expr <var_decl "b"> <constant C>>
2192 If "a" is a GIMPLE register, the assignment to it is free on almost
2193 any target, because "a" usually ends up in a real register. Hence
2194 the only cost of this expression comes from the PLUS_EXPR, and we
2195 can ignore the GIMPLE_MODIFY_STMT.
2196 If "a" is not a GIMPLE register, the assignment to "a" will most
2197 likely be a real store, so the cost of the GIMPLE_MODIFY_STMT is the cost
2198 of moving something into "a", which we compute using the function
2199 estimate_move_cost.
2201 The third case deals with TARGET_EXPRs, for which the semantics are
2202 that a temporary is assigned, unless the TARGET_EXPR itself is being
2203 assigned to something else. In the latter case we do not need the
2204 temporary. E.g. in:
2205 <GIMPLE_MODIFY_STMT <var_decl "a"> <target_expr>>, the
2206 GIMPLE_MODIFY_STMT is free. */
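    /* Concretely (a sketch of the rules above): in "a_1 = b_2 + c_3"
       where a_1 is an SSA name, only the PLUS_EXPR is charged, for a
       total of 1; in "x.f = b" where X lives in memory, the store is
       charged estimate_move_cost (TREE_TYPE (x.f)) in addition to
       whatever the right-hand side itself costs.  */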
2207 case INIT_EXPR:
2208 case GIMPLE_MODIFY_STMT:
2209 /* Is the right-hand side a TARGET_EXPR? */
2210 if (TREE_CODE (GENERIC_TREE_OPERAND (x, 1)) == TARGET_EXPR)
2211 break;
2212 /* ... fall through ... */
2214 case TARGET_EXPR:
2215 x = GENERIC_TREE_OPERAND (x, 0);
2216 /* Is this an assignment to a register? */
2217 if (is_gimple_reg (x))
2218 break;
2219 /* Otherwise it's a store, so fall through to compute the move cost. */
2221 case CONSTRUCTOR:
2222 d->count += estimate_move_cost (TREE_TYPE (x));
2223 break;
2225 /* Assign cost of 1 to usual operations.
2226 ??? We may consider mapping RTL costs to this. */
2227 case COND_EXPR:
2228 case VEC_COND_EXPR:
2230 case PLUS_EXPR:
2231 case POINTER_PLUS_EXPR:
2232 case MINUS_EXPR:
2233 case MULT_EXPR:
2235 case FIXED_CONVERT_EXPR:
2236 case FIX_TRUNC_EXPR:
2238 case NEGATE_EXPR:
2239 case FLOAT_EXPR:
2240 case MIN_EXPR:
2241 case MAX_EXPR:
2242 case ABS_EXPR:
2244 case LSHIFT_EXPR:
2245 case RSHIFT_EXPR:
2246 case LROTATE_EXPR:
2247 case RROTATE_EXPR:
2248 case VEC_LSHIFT_EXPR:
2249 case VEC_RSHIFT_EXPR:
2251 case BIT_IOR_EXPR:
2252 case BIT_XOR_EXPR:
2253 case BIT_AND_EXPR:
2254 case BIT_NOT_EXPR:
2256 case TRUTH_ANDIF_EXPR:
2257 case TRUTH_ORIF_EXPR:
2258 case TRUTH_AND_EXPR:
2259 case TRUTH_OR_EXPR:
2260 case TRUTH_XOR_EXPR:
2261 case TRUTH_NOT_EXPR:
2263 case LT_EXPR:
2264 case LE_EXPR:
2265 case GT_EXPR:
2266 case GE_EXPR:
2267 case EQ_EXPR:
2268 case NE_EXPR:
2269 case ORDERED_EXPR:
2270 case UNORDERED_EXPR:
2272 case UNLT_EXPR:
2273 case UNLE_EXPR:
2274 case UNGT_EXPR:
2275 case UNGE_EXPR:
2276 case UNEQ_EXPR:
2277 case LTGT_EXPR:
2279 case CONJ_EXPR:
2281 case PREDECREMENT_EXPR:
2282 case PREINCREMENT_EXPR:
2283 case POSTDECREMENT_EXPR:
2284 case POSTINCREMENT_EXPR:
2286 case ASM_EXPR:
2288 case REALIGN_LOAD_EXPR:
2290 case REDUC_MAX_EXPR:
2291 case REDUC_MIN_EXPR:
2292 case REDUC_PLUS_EXPR:
2293 case WIDEN_SUM_EXPR:
2294 case DOT_PROD_EXPR:
2295 case VEC_WIDEN_MULT_HI_EXPR:
2296 case VEC_WIDEN_MULT_LO_EXPR:
2297 case VEC_UNPACK_HI_EXPR:
2298 case VEC_UNPACK_LO_EXPR:
2299 case VEC_UNPACK_FLOAT_HI_EXPR:
2300 case VEC_UNPACK_FLOAT_LO_EXPR:
2301 case VEC_PACK_TRUNC_EXPR:
2302 case VEC_PACK_SAT_EXPR:
2303 case VEC_PACK_FIX_TRUNC_EXPR:
2305 case WIDEN_MULT_EXPR:
2307 case VEC_EXTRACT_EVEN_EXPR:
2308 case VEC_EXTRACT_ODD_EXPR:
2309 case VEC_INTERLEAVE_HIGH_EXPR:
2310 case VEC_INTERLEAVE_LOW_EXPR:
2312 case RESX_EXPR:
2313 d->count += 1;
2314 break;
2316 case SWITCH_EXPR:
2317 /* TODO: Cost of a switch should be derived from the number of
2318 branches. */
2319 d->count += d->weights->switch_cost;
2320 break;
2322 /* Few special cases of expensive operations. This is useful
2323 to avoid inlining on functions having too many of these. */
2324 case TRUNC_DIV_EXPR:
2325 case CEIL_DIV_EXPR:
2326 case FLOOR_DIV_EXPR:
2327 case ROUND_DIV_EXPR:
2328 case EXACT_DIV_EXPR:
2329 case TRUNC_MOD_EXPR:
2330 case CEIL_MOD_EXPR:
2331 case FLOOR_MOD_EXPR:
2332 case ROUND_MOD_EXPR:
2333 case RDIV_EXPR:
2334 d->count += d->weights->div_mod_cost;
2335 break;
2336 case CALL_EXPR:
2338 tree decl = get_callee_fndecl (x);
2340 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
2341 cost = d->weights->target_builtin_call_cost;
2342 else
2343 cost = d->weights->call_cost;
2345 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
2346 switch (DECL_FUNCTION_CODE (decl))
2348 case BUILT_IN_CONSTANT_P:
2349 *walk_subtrees = 0;
2350 return NULL_TREE;
2351 case BUILT_IN_EXPECT:
2352 return NULL_TREE;
2353 /* Prefetch instruction is not expensive. */
2354 case BUILT_IN_PREFETCH:
2355 cost = 1;
2356 break;
2357 default:
2358 break;
2361 /* Our cost must be kept in sync with cgraph_estimate_size_after_inlining
2362 which uses the function declaration to figure out the arguments. */
2363 if (!decl)
2365 tree a;
2366 call_expr_arg_iterator iter;
2367 FOR_EACH_CALL_EXPR_ARG (a, iter, x)
2368 d->count += estimate_move_cost (TREE_TYPE (a));
2370 else
2372 tree arg;
2373 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2374 d->count += estimate_move_cost (TREE_TYPE (arg));
2377 d->count += cost;
2378 break;
2381 case OMP_PARALLEL:
2382 case OMP_FOR:
2383 case OMP_SECTIONS:
2384 case OMP_SINGLE:
2385 case OMP_SECTION:
2386 case OMP_MASTER:
2387 case OMP_ORDERED:
2388 case OMP_CRITICAL:
2389 case OMP_ATOMIC:
2390 case OMP_ATOMIC_LOAD:
2391 /* OpenMP directives are generally very expensive. */
2392 d->count += d->weights->omp_cost;
2393 break;
2395 default:
2396 gcc_unreachable ();
2398 return NULL;
2401 /* Estimate number of instructions that will be created by expanding EXPR.
2402 WEIGHTS contains weights attributed to various constructs. */
2404 int
2405 estimate_num_insns (tree expr, eni_weights *weights)
2407 struct pointer_set_t *visited_nodes;
2408 basic_block bb;
2409 block_stmt_iterator bsi;
2410 struct function *my_function;
2411 struct eni_data data;
2413 data.count = 0;
2414 data.weights = weights;
2416 /* If we're given an entire function, walk the CFG. */
2417 if (TREE_CODE (expr) == FUNCTION_DECL)
2419 my_function = DECL_STRUCT_FUNCTION (expr);
2420 gcc_assert (my_function && my_function->cfg);
2421 visited_nodes = pointer_set_create ();
2422 FOR_EACH_BB_FN (bb, my_function)
2424 for (bsi = bsi_start (bb);
2425 !bsi_end_p (bsi);
2426 bsi_next (&bsi))
2428 walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
2429 &data, visited_nodes);
2432 pointer_set_destroy (visited_nodes);
2434 else
2435 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &data);
2437 return data.count;
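/* Typical uses (sketch): estimate_num_insns (fndecl, &eni_size_weights)
   for a size estimate of a whole function, or
   estimate_num_insns (stmt, &eni_time_weights) for a time estimate of
   a single statement.  */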
2440 /* Initializes weights used by estimate_num_insns. */
2442 void
2443 init_inline_once (void)
2445 eni_inlining_weights.call_cost = PARAM_VALUE (PARAM_INLINE_CALL_COST);
2446 eni_inlining_weights.target_builtin_call_cost = 1;
2447 eni_inlining_weights.div_mod_cost = 10;
2448 eni_inlining_weights.switch_cost = 1;
2449 eni_inlining_weights.omp_cost = 40;
2451 eni_size_weights.call_cost = 1;
2452 eni_size_weights.target_builtin_call_cost = 1;
2453 eni_size_weights.div_mod_cost = 1;
2454 eni_size_weights.switch_cost = 10;
2455 eni_size_weights.omp_cost = 40;
2457 /* Estimating time for call is difficult, since we have no idea what the
2458 called function does. In the current uses of eni_time_weights,
2459 underestimating the cost does less harm than overestimating it, so
2460 we choose a rather small value here. */
2461 eni_time_weights.call_cost = 10;
2462 eni_time_weights.target_builtin_call_cost = 10;
2463 eni_time_weights.div_mod_cost = 10;
2464 eni_time_weights.switch_cost = 4;
2465 eni_time_weights.omp_cost = 40;
2468 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
2469 static void
2470 add_lexical_block (tree current_block, tree new_block)
2472 tree *blk_p;
2474 /* Walk to the last sub-block. */
2475 for (blk_p = &BLOCK_SUBBLOCKS (current_block);
2476 *blk_p;
2477 blk_p = &BLOCK_CHAIN (*blk_p))
2479 *blk_p = new_block;
2480 BLOCK_SUPERCONTEXT (new_block) = current_block;
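/* For example, if CURRENT_BLOCK already has sub-blocks B1 and B2, the
   sub-block chain becomes B1 -> B2 -> NEW_BLOCK, and BLOCK_SUPERCONTEXT
   of NEW_BLOCK points back at CURRENT_BLOCK.  (Illustration only.)  */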
2483 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
2485 static bool
2486 expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
2488 copy_body_data *id;
2489 tree t;
2490 tree use_retvar;
2491 tree fn;
2492 struct pointer_map_t *st;
2493 tree return_slot;
2494 tree modify_dest;
2495 location_t saved_location;
2496 struct cgraph_edge *cg_edge;
2497 const char *reason;
2498 basic_block return_block;
2499 edge e;
2500 block_stmt_iterator bsi, stmt_bsi;
2501 bool successfully_inlined = FALSE;
2502 bool purge_dead_abnormal_edges;
2503 tree t_step;
2504 tree var;
2506 /* See what we've got. */
2507 id = (copy_body_data *) data;
2508 t = *tp;
2510 /* Set input_location here so we get the right instantiation context
2511 if we call instantiate_decl from inlinable_function_p. */
2512 saved_location = input_location;
2513 if (EXPR_HAS_LOCATION (t))
2514 input_location = EXPR_LOCATION (t);
2516 /* From here on, we're only interested in CALL_EXPRs. */
2517 if (TREE_CODE (t) != CALL_EXPR)
2518 goto egress;
2520 /* First, see if we can figure out what function is being called.
2521 If we cannot, then there is no hope of inlining the function. */
2522 fn = get_callee_fndecl (t);
2523 if (!fn)
2524 goto egress;
2526 /* Turn forward declarations into real ones. */
2527 fn = cgraph_node (fn)->decl;
2529 /* If fn is a declaration of a function in a nested scope that was
2530 globally declared inline, we don't set its DECL_INITIAL.
2531 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
2532 C++ front-end uses it for cdtors to refer to their internal
2533 declarations, that are not real functions. Fortunately those
2534 don't have trees to be saved, so we can tell by checking their
2535 DECL_SAVED_TREE. */
2536 if (! DECL_INITIAL (fn)
2537 && DECL_ABSTRACT_ORIGIN (fn)
2538 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
2539 fn = DECL_ABSTRACT_ORIGIN (fn);
2541 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
2542 Remove this check once that is fixed. */
2543 if (!id->dst_node->analyzed)
2544 goto egress;
2546 cg_edge = cgraph_edge (id->dst_node, stmt);
2548 /* Constant propagation on arguments done during previous inlining
2549 may create a new direct call. Produce an edge for it. */
2550 if (!cg_edge)
2552 struct cgraph_node *dest = cgraph_node (fn);
2554 /* We have a missing edge in the callgraph. This can happen when
2555 previous inlining turned an indirect call into a direct call by
2556 constant-propagating arguments. In all other cases we hit a bug
2557 (incorrect node sharing is the most common reason for missing edges). */
2558 gcc_assert (dest->needed || !flag_unit_at_a_time);
2559 cgraph_create_edge (id->dst_node, dest, stmt,
2560 bb->count, CGRAPH_FREQ_BASE,
2561 bb->loop_depth)->inline_failed
2562 = N_("originally indirect function call not considered for inlining");
2563 if (dump_file)
2565 fprintf (dump_file, "Created new direct edge to %s",
2566 cgraph_node_name (dest));
2568 goto egress;
2571 /* Don't try to inline functions that are not well-suited to
2572 inlining. */
2573 if (!cgraph_inline_p (cg_edge, &reason))
2575 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
2576 /* Avoid warnings during early inline pass. */
2577 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2579 sorry ("inlining failed in call to %q+F: %s", fn, reason);
2580 sorry ("called from here");
2582 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
2583 && !DECL_IN_SYSTEM_HEADER (fn)
2584 && strlen (reason)
2585 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
2586 /* Avoid warnings during early inline pass. */
2587 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2589 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
2590 fn, reason);
2591 warning (OPT_Winline, "called from here");
2593 goto egress;
2595 fn = cg_edge->callee->decl;
2597 #ifdef ENABLE_CHECKING
2598 if (cg_edge->callee->decl != id->dst_node->decl)
2599 verify_cgraph_node (cg_edge->callee);
2600 #endif
2602 /* We will be inlining this callee. */
2603 id->eh_region = lookup_stmt_eh_region (stmt);
2605 /* Split the block holding the CALL_EXPR. */
2606 e = split_block (bb, stmt);
2607 bb = e->src;
2608 return_block = e->dest;
2609 remove_edge (e);
2611 /* split_block splits after the statement; work around this by
2612 moving the call into the second block manually. Not pretty,
2613 but seems easier than doing the CFG manipulation by hand
2614 when the CALL_EXPR is in the last statement of BB. */
2615 stmt_bsi = bsi_last (bb);
2616 bsi_remove (&stmt_bsi, false);
2618 /* If the CALL_EXPR was in the last statement of BB, it may have
2619 been the source of abnormal edges. In this case, schedule
2620 the removal of dead abnormal edges. */
2621 bsi = bsi_start (return_block);
2622 if (bsi_end_p (bsi))
2624 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2625 purge_dead_abnormal_edges = true;
2627 else
2629 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
2630 purge_dead_abnormal_edges = false;
2633 stmt_bsi = bsi_start (return_block);
2635 /* Build a block containing code to initialize the arguments, the
2636 actual inline expansion of the body, and a label for the return
2637 statements within the function to jump to. The type of the
2638 statement expression is the return type of the function call. */
2639 id->block = make_node (BLOCK);
2640 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
2641 BLOCK_SOURCE_LOCATION (id->block) = input_location;
2642 add_lexical_block (TREE_BLOCK (stmt), id->block);
2644 /* Local declarations will be replaced by their equivalents in this
2645 map. */
2646 st = id->decl_map;
2647 id->decl_map = pointer_map_create ();
2649 /* Record the function we are about to inline. */
2650 id->src_fn = fn;
2651 id->src_node = cg_edge->callee;
2652 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
2653 id->call_expr = t;
2655 gcc_assert (!id->src_cfun->after_inlining);
2657 initialize_inlined_parameters (id, t, fn, bb);
2659 if (DECL_INITIAL (fn))
2660 add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
2662 /* Return statements in the function body will be replaced by jumps
2663 to the RET_LABEL. */
2665 gcc_assert (DECL_INITIAL (fn));
2666 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
2668 /* Find the lhs to which the result of this call is assigned. */
2669 return_slot = NULL;
2670 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
2672 modify_dest = GIMPLE_STMT_OPERAND (stmt, 0);
2674 /* The function we are inlining might not return a value, in which
2675 case the appropriate diagnostic is the warning that the function
2676 does not return a value. The optimizers would then see that the
2677 variable to which the value is assigned was not initialized; we
2678 do not want to issue an additional warning about that
2679 uninitialized variable. */
2680 if (DECL_P (modify_dest))
2681 TREE_NO_WARNING (modify_dest) = 1;
2682 if (CALL_EXPR_RETURN_SLOT_OPT (t))
2684 return_slot = modify_dest;
2685 modify_dest = NULL;
2688 else
2689 modify_dest = NULL;
2691 /* Declare the return variable for the function. */
2692 declare_return_variable (id, return_slot,
2693 modify_dest, &use_retvar);
2695 /* This is it. Duplicate the callee body. Assume callee is
2696 pre-gimplified. Note that we must not alter the caller
2697 function in any way before this point, as this CALL_EXPR may be
2698 a self-referential call; if we're calling ourselves, we need to
2699 duplicate our body before altering anything. */
2700 copy_body (id, bb->count, bb->frequency, bb, return_block);
2702 /* Add local vars in this inlined callee to caller. */
2703 t_step = id->src_cfun->unexpanded_var_list;
2704 for (; t_step; t_step = TREE_CHAIN (t_step))
2706 var = TREE_VALUE (t_step);
2707 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2708 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
2709 cfun->unexpanded_var_list);
2710 else
2711 cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
2712 cfun->unexpanded_var_list);
2715 /* Clean up. */
2716 pointer_map_destroy (id->decl_map);
2717 id->decl_map = st;
2719 /* If the inlined function returns a result that we care about,
2720 clobber the CALL_EXPR with a reference to the return variable. */
2721 if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
2723 *tp = use_retvar;
2724 if (gimple_in_ssa_p (cfun))
2726 update_stmt (stmt);
2727 mark_symbols_for_renaming (stmt);
2729 maybe_clean_or_replace_eh_stmt (stmt, stmt);
2731 else
2732 /* We're modifying a BSI owned by gimple_expand_calls_inline ();
2733 bsi_remove () will leave the iterator in a sane state. */
2735 /* Handle the case of inlining a function that is missing a return
2736 statement, so the return value becomes undefined. */
2737 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
2738 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME)
2740 tree name = GIMPLE_STMT_OPERAND (stmt, 0);
2741 tree var = SSA_NAME_VAR (GIMPLE_STMT_OPERAND (stmt, 0));
2742 tree def = gimple_default_def (cfun, var);
2744 /* If the variable is used undefined, make this name undefined via
2745 move. */
2746 if (def)
2748 GIMPLE_STMT_OPERAND (stmt, 1) = def;
2749 update_stmt (stmt);
2751 /* Otherwise make this variable undefined. */
2752 else
2754 bsi_remove (&stmt_bsi, true);
2755 set_default_def (var, name);
2756 SSA_NAME_DEF_STMT (name) = build_empty_stmt ();
2759 else
2760 bsi_remove (&stmt_bsi, true);
2763 if (purge_dead_abnormal_edges)
2764 tree_purge_dead_abnormal_call_edges (return_block);
2766 /* If the value of the new expression is ignored, that's OK. We
2767 don't warn about this for CALL_EXPRs, so we shouldn't warn about
2768 the equivalent inlined version either. */
2769 TREE_USED (*tp) = 1;
2771 /* Output the inlining info for this abstract function, since it has been
2772 inlined. If we don't do this now, we can lose the information about the
2773 variables in the function when the blocks get blown away as soon as we
2774 remove the cgraph node. */
2775 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
2777 /* Update callgraph if needed. */
2778 cgraph_remove_node (cg_edge->callee);
2780 id->block = NULL_TREE;
2781 successfully_inlined = TRUE;
2783 egress:
2784 input_location = saved_location;
2785 return successfully_inlined;
2788 /* Expand the calls found in the statements of basic block BB.
2789 We can only have CALL_EXPRs as the "toplevel" tree code or nested
2790 in a GIMPLE_MODIFY_STMT. See tree-gimple.c:get_call_expr_in().
2791 Unfortunately we cannot use that function here because we need a
2792 pointer to the CALL_EXPR, not the tree itself. */
2794 static bool
2795 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
2797 block_stmt_iterator bsi;
2799 /* Register specific tree functions. */
2800 tree_register_cfg_hooks ();
2801 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2803 tree *expr_p = bsi_stmt_ptr (bsi);
2804 tree stmt = *expr_p;
2806 if (TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT)
2807 expr_p = &GIMPLE_STMT_OPERAND (*expr_p, 1);
2808 if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
2809 expr_p = &TREE_OPERAND (*expr_p, 0);
2810 if (TREE_CODE (*expr_p) == CALL_EXPR)
2811 if (expand_call_inline (bb, stmt, expr_p, id))
2812 return true;
2814 return false;
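/* For example (sketch): given the statement

     lhs = WITH_SIZE_EXPR <f (args), size>

   EXPR_P above ends up pointing at the nested CALL_EXPR "f (args)",
   which is exactly what expand_call_inline needs to overwrite with the
   inlined return value.  */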
2817 /* Walk all basic blocks created after FIRST and try to fold every statement
2818 in the STATEMENTS pointer set. */
2819 static void
2820 fold_marked_statements (int first, struct pointer_set_t *statements)
2822 for (; first < n_basic_blocks; first++)
2823 if (BASIC_BLOCK (first))
2825 block_stmt_iterator bsi;
2826 for (bsi = bsi_start (BASIC_BLOCK (first));
2827 !bsi_end_p (bsi); bsi_next (&bsi))
2828 if (pointer_set_contains (statements, bsi_stmt (bsi)))
2830 tree old_stmt = bsi_stmt (bsi);
2831 if (fold_stmt (bsi_stmt_ptr (bsi)))
2833 update_stmt (bsi_stmt (bsi));
2834 if (maybe_clean_or_replace_eh_stmt (old_stmt, bsi_stmt (bsi)))
2835 tree_purge_dead_eh_edges (BASIC_BLOCK (first));
2841 /* Return true if BB has at least one abnormal outgoing edge. */
2843 static inline bool
2844 has_abnormal_outgoing_edge_p (basic_block bb)
2846 edge e;
2847 edge_iterator ei;
2849 FOR_EACH_EDGE (e, ei, bb->succs)
2850 if (e->flags & EDGE_ABNORMAL)
2851 return true;
2853 return false;
2856 /* Expand calls to inline functions in the body of FN. */
2858 unsigned int
2859 optimize_inline_calls (tree fn)
2861 copy_body_data id;
2862 tree prev_fn;
2863 basic_block bb;
2864 int last = n_basic_blocks;
2865 /* There is no point in performing inlining if errors have already
2866 occurred -- and we might crash if we try to inline invalid
2867 code. */
2868 if (errorcount || sorrycount)
2869 return 0;
2871 /* Clear out ID. */
2872 memset (&id, 0, sizeof (id));
2874 id.src_node = id.dst_node = cgraph_node (fn);
2875 id.dst_fn = fn;
2876 /* Or any functions that aren't finished yet. */
2877 prev_fn = NULL_TREE;
2878 if (current_function_decl)
2880 id.dst_fn = current_function_decl;
2881 prev_fn = current_function_decl;
2884 id.copy_decl = copy_decl_maybe_to_var;
2885 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2886 id.transform_new_cfg = false;
2887 id.transform_return_to_modify = true;
2888 id.transform_lang_insert_block = false;
2889 id.statements_to_fold = pointer_set_create ();
2891 push_gimplify_context ();
2893 /* We make no attempts to keep dominance info up-to-date. */
2894 free_dominance_info (CDI_DOMINATORS);
2895 free_dominance_info (CDI_POST_DOMINATORS);
2897 /* Reach the trees by walking over the CFG, and note the
2898 enclosing basic-blocks in the call edges. */
2899 /* We walk the blocks going forward, because inlined function bodies
2900 will split id->current_basic_block, and the new blocks will
2901 follow it; we'll trudge through them, processing their CALL_EXPRs
2902 along the way. */
2903 FOR_EACH_BB (bb)
2904 gimple_expand_calls_inline (bb, &id);
2906 pop_gimplify_context (NULL);
2908 #ifdef ENABLE_CHECKING
2910 struct cgraph_edge *e;
2912 verify_cgraph_node (id.dst_node);
2914 /* Double check that we inlined everything we are supposed to inline. */
2915 for (e = id.dst_node->callees; e; e = e->next_callee)
2916 gcc_assert (e->inline_failed);
2918 #endif
2920 /* Fold the statements before compacting/renumbering the basic blocks. */
2921 fold_marked_statements (last, id.statements_to_fold);
2922 pointer_set_destroy (id.statements_to_fold);
2924 /* Renumber the (code) basic_blocks consecutively. */
2925 compact_blocks ();
2926 /* Renumber the lexical scoping (non-code) blocks consecutively. */
2927 number_blocks (fn);
2929 /* We are not going to maintain the cgraph edges up to date.
2930 Kill it so it won't confuse us. */
2931 cgraph_node_remove_callees (id.dst_node);
2933 fold_cond_expr_cond ();
2934 /* It would be nice to check SSA/CFG/statement consistency here, but it
2935 is not possible yet - the IPA passes might make various functions
2936 non-throwing without proactively updating the local EH info. This is
2937 done later in the fixup_cfg pass, which also runs the verification. */
2938 return (TODO_update_ssa | TODO_cleanup_cfg
2939 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
2940 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
2943 /* FN is a function that has a complete body, and CLONE is a function whose
2944 body is to be set to a copy of FN, mapping argument declarations according
2945 to the ARG_MAP splay_tree. */
2947 void
2948 clone_body (tree clone, tree fn, void *arg_map)
2950 copy_body_data id;
2952 /* Clone the body, as if we were making an inline call. But, remap the
2953 parameters in the callee to the parameters of caller. */
2954 memset (&id, 0, sizeof (id));
2955 id.src_fn = fn;
2956 id.dst_fn = clone;
2957 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
2958 id.decl_map = (struct pointer_map_t *)arg_map;
2960 id.copy_decl = copy_decl_no_change;
2961 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2962 id.transform_new_cfg = true;
2963 id.transform_return_to_modify = false;
2964 id.transform_lang_insert_block = true;
2966 /* We're not inside any EH region. */
2967 id.eh_region = -1;
2969 /* Actually copy the body. */
2970 append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
2973 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2975 tree
2976 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2978 enum tree_code code = TREE_CODE (*tp);
2979 enum tree_code_class cl = TREE_CODE_CLASS (code);
2981 /* We make copies of most nodes. */
2982 if (IS_EXPR_CODE_CLASS (cl)
2983 || IS_GIMPLE_STMT_CODE_CLASS (cl)
2984 || code == TREE_LIST
2985 || code == TREE_VEC
2986 || code == TYPE_DECL
2987 || code == OMP_CLAUSE)
2989 /* Because the chain gets clobbered when we make a copy, we save it
2990 here. */
2991 tree chain = NULL_TREE, new;
2993 if (!GIMPLE_TUPLE_P (*tp))
2994 chain = TREE_CHAIN (*tp);
2996 /* Copy the node. */
2997 new = copy_node (*tp);
2999 /* Propagate mudflap marked-ness. */
3000 if (flag_mudflap && mf_marked_p (*tp))
3001 mf_mark (new);
3003 *tp = new;
3005 /* Now, restore the chain, if appropriate. That will cause
3006 walk_tree to walk into the chain as well. */
3007 if (code == PARM_DECL
3008 || code == TREE_LIST
3009 || code == OMP_CLAUSE)
3010 TREE_CHAIN (*tp) = chain;
3012 /* For now, we don't update BLOCKs when we make copies. So, we
3013 have to nullify all BIND_EXPRs. */
3014 if (TREE_CODE (*tp) == BIND_EXPR)
3015 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
3017 else if (code == CONSTRUCTOR)
3019 /* CONSTRUCTOR nodes need special handling because
3020 we need to duplicate the vector of elements. */
3021 tree new;
3023 new = copy_node (*tp);
3025 /* Propagate mudflap marked-ness. */
3026 if (flag_mudflap && mf_marked_p (*tp))
3027 mf_mark (new);
3029 CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
3030 CONSTRUCTOR_ELTS (*tp));
3031 *tp = new;
3033 else if (TREE_CODE_CLASS (code) == tcc_type)
3034 *walk_subtrees = 0;
3035 else if (TREE_CODE_CLASS (code) == tcc_declaration)
3036 *walk_subtrees = 0;
3037 else if (TREE_CODE_CLASS (code) == tcc_constant)
3038 *walk_subtrees = 0;
3039 else
3040 gcc_assert (code != STATEMENT_LIST);
3041 return NULL_TREE;
3044 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
3045 information indicating to what new SAVE_EXPR this one should be
3046 mapped, use that one. Otherwise, create a new node, enter it in ST,
3047 and map it to itself so it is never remapped a second time. */
3049 static void
3050 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
3052 struct pointer_map_t *st = (struct pointer_map_t *) st_;
3053 tree *n;
3054 tree t;
3056 /* See if we already encountered this SAVE_EXPR. */
3057 n = (tree *) pointer_map_contains (st, *tp);
3059 /* If we didn't already remap this SAVE_EXPR, do so now. */
3060 if (!n)
3062 t = copy_node (*tp);
3064 /* Remember this SAVE_EXPR. */
3065 *pointer_map_insert (st, *tp) = t;
3066 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
3067 *pointer_map_insert (st, t) = t;
3069 else
3071 /* We've already walked into this SAVE_EXPR; don't do it again. */
3072 *walk_subtrees = 0;
3073 t = *n;
3076 /* Replace this SAVE_EXPR with the copy. */
3077 *tp = t;
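/* Sketch of why the mapping matters: in a tree such as

     SAVE_EXPR <i++> * SAVE_EXPR <i++>

   where both operands are the *same* node, every copied use must refer
   to one new SAVE_EXPR so that the side effect is still evaluated only
   once.  */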
3080 /* Called via walk_tree. If *TP is a LABEL_EXPR for a local label,
3081 copies the label's declaration and enters it in the decl map in
3082 DATA (which is really a `copy_body_data *'). */
3084 static tree
3085 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
3086 void *data)
3088 copy_body_data *id = (copy_body_data *) data;
3090 /* Don't walk into types. */
3091 if (TYPE_P (*tp))
3092 *walk_subtrees = 0;
3094 else if (TREE_CODE (*tp) == LABEL_EXPR)
3096 tree decl = TREE_OPERAND (*tp, 0);
3098 /* Copy the decl and remember the copy. */
3099 insert_decl_map (id, decl, id->copy_decl (decl, id));
3102 return NULL_TREE;
3105 /* Perform any modifications to EXPR required when it is unsaved. Does
3106 not recurse into EXPR's subtrees. */
3108 static void
3109 unsave_expr_1 (tree expr)
3111 switch (TREE_CODE (expr))
3113 case TARGET_EXPR:
3114 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
3115 It's OK for this to happen if it was part of a subtree that
3116 isn't immediately expanded, such as operand 2 of another
3117 TARGET_EXPR. */
3118 if (TREE_OPERAND (expr, 1))
3119 break;
3121 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
3122 TREE_OPERAND (expr, 3) = NULL_TREE;
3123 break;
3125 default:
3126 break;
3130 /* Called via walk_tree when an expression is unsaved. Using the
3131 decl map pointed to by ST (which is really a `pointer_map_t'),
3132 remaps all local declarations to appropriate replacements. */
3134 static tree
3135 unsave_r (tree *tp, int *walk_subtrees, void *data)
3137 copy_body_data *id = (copy_body_data *) data;
3138 struct pointer_map_t *st = id->decl_map;
3139 tree *n;
3141 /* Only a local declaration (variable or label). */
3142 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
3143 || TREE_CODE (*tp) == LABEL_DECL)
3145 /* Lookup the declaration. */
3146 n = (tree *) pointer_map_contains (st, *tp);
3148 /* If it's there, remap it. */
3149 if (n)
3150 *tp = *n;
3153 else if (TREE_CODE (*tp) == STATEMENT_LIST)
3154 copy_statement_list (tp);
3155 else if (TREE_CODE (*tp) == BIND_EXPR)
3156 copy_bind_expr (tp, walk_subtrees, id);
3157 else if (TREE_CODE (*tp) == SAVE_EXPR)
3158 remap_save_expr (tp, st, walk_subtrees);
3159 else
3161 copy_tree_r (tp, walk_subtrees, NULL);
3163 /* Do whatever unsaving is required. */
3164 unsave_expr_1 (*tp);
3167 /* Keep iterating. */
3168 return NULL_TREE;
3171 /* Copies everything in EXPR and replaces variables, labels
3172 and SAVE_EXPRs local to EXPR. */
3174 tree
3175 unsave_expr_now (tree expr)
3177 copy_body_data id;
3179 /* There's nothing to do for NULL_TREE. */
3180 if (expr == 0)
3181 return expr;
3183 /* Set up ID. */
3184 memset (&id, 0, sizeof (id));
3185 id.src_fn = current_function_decl;
3186 id.dst_fn = current_function_decl;
3187 id.decl_map = pointer_map_create ();
3189 id.copy_decl = copy_decl_no_change;
3190 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3191 id.transform_new_cfg = false;
3192 id.transform_return_to_modify = false;
3193 id.transform_lang_insert_block = false;
3195 /* Walk the tree once to find local labels. */
3196 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
3198 /* Walk the tree again, copying, remapping, and unsaving. */
3199 walk_tree (&expr, unsave_r, &id, NULL);
3201 /* Clean up. */
3202 pointer_map_destroy (id.decl_map);
3204 return expr;
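/* Typical use (sketch): a front end that needs to expand a tree twice
   can take an independent copy first,

     tree copy = unsave_expr_now (expr);

   after which the local variables, labels and SAVE_EXPRs of COPY are
   distinct from those of EXPR.  */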
3207 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
3209 static tree
3210 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
3212 if (*tp == data)
3213 return (tree) data;
3214 else
3215 return NULL;
3218 bool
3219 debug_find_tree (tree top, tree search)
3221 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
3225 /* Declare the variables created by the inliner. Add all the variables in
3226 VARS to BIND_EXPR. */
3228 static void
3229 declare_inline_vars (tree block, tree vars)
3231 tree t;
3232 for (t = vars; t; t = TREE_CHAIN (t))
3234 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
3235 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
3236 cfun->unexpanded_var_list =
3237 tree_cons (NULL_TREE, t,
3238 cfun->unexpanded_var_list);
3241 if (block)
3242 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
3246 /* Finish up the bookkeeping for COPY, a duplicate of DECL. The decl
3247 originally lived in ID->src_fn; the copy will live in ID->dst_fn.
3248 Sets up the debug info, context and other shared fields. */
3250 static tree
3251 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
3253 /* Don't generate debug information for the copy if we wouldn't have
3254 generated it for the original decl either. */
3255 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
3256 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
3258 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
3259 declaration inspired this copy. */
3260 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
3262 /* The new variable/label has no RTL, yet. */
3263 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
3264 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
3265 SET_DECL_RTL (copy, NULL_RTX);
3267 /* These args would always appear unused, if not for this. */
3268 TREE_USED (copy) = 1;
3270 /* Set the context for the new declaration. */
3271 if (!DECL_CONTEXT (decl))
3272 /* Globals stay global. */
3274 else if (DECL_CONTEXT (decl) != id->src_fn)
3275 /* Things that weren't in the scope of the function we're inlining
3276 from aren't in the scope we're inlining to, either. */
3278 else if (TREE_STATIC (decl))
3279 /* Function-scoped static variables should stay in the original
3280 function. */
3282 else
3283 /* Ordinary automatic local variables are now in the scope of the
3284 new function. */
3285 DECL_CONTEXT (copy) = id->dst_fn;
3287 return copy;
3290 static tree
3291 copy_decl_to_var (tree decl, copy_body_data *id)
3293 tree copy, type;
3295 gcc_assert (TREE_CODE (decl) == PARM_DECL
3296 || TREE_CODE (decl) == RESULT_DECL);
3298 type = TREE_TYPE (decl);
3300 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
3301 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
3302 TREE_READONLY (copy) = TREE_READONLY (decl);
3303 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
3304 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
3305 DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl);
3307 return copy_decl_for_dup_finish (id, decl, copy);
3310 /* Like copy_decl_to_var, but create a return slot object instead of a
3311 pointer variable for return by invisible reference. */
3313 static tree
3314 copy_result_decl_to_var (tree decl, copy_body_data *id)
3316 tree copy, type;
3318 gcc_assert (TREE_CODE (decl) == PARM_DECL
3319 || TREE_CODE (decl) == RESULT_DECL);
3321 type = TREE_TYPE (decl);
3322 if (DECL_BY_REFERENCE (decl))
3323 type = TREE_TYPE (type);
3325 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
3326 TREE_READONLY (copy) = TREE_READONLY (decl);
3327 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
3328 if (!DECL_BY_REFERENCE (decl))
3330 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
3331 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
3332 DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl);
3335 return copy_decl_for_dup_finish (id, decl, copy);
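/* Illustration (sketch): for a function returning a large struct by
   invisible reference, the RESULT_DECL has pointer type "struct S *"
   and DECL_BY_REFERENCE set; the variable built here has type
   "struct S", i.e. the return slot itself rather than a pointer to it.  */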
3339 static tree
3340 copy_decl_no_change (tree decl, copy_body_data *id)
3342 tree copy;
3344 copy = copy_node (decl);
3346 /* The COPY is not abstract; it will be generated in DST_FN. */
3347 DECL_ABSTRACT (copy) = 0;
3348 lang_hooks.dup_lang_specific_decl (copy);
3350 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
3351 been taken; it's for internal bookkeeping in expand_goto_internal. */
3352 if (TREE_CODE (copy) == LABEL_DECL)
3354 TREE_ADDRESSABLE (copy) = 0;
3355 LABEL_DECL_UID (copy) = -1;
3358 return copy_decl_for_dup_finish (id, decl, copy);
3361 static tree
3362 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
3364 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
3365 return copy_decl_to_var (decl, id);
3366 else
3367 return copy_decl_no_change (decl, id);
3370 /* Return a copy of the function's argument tree. */
3371 static tree
3372 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id)
3374 tree *arg_copy, *parg;
3376 arg_copy = &orig_parm;
3377 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
3379 tree new = remap_decl (*parg, id);
3380 lang_hooks.dup_lang_specific_decl (new);
3381 TREE_CHAIN (new) = TREE_CHAIN (*parg);
3382 *parg = new;
3384 return orig_parm;
3387 /* Return a copy of the function's static chain. */
3388 static tree
3389 copy_static_chain (tree static_chain, copy_body_data * id)
3391 tree *chain_copy, *pvar;
3393 chain_copy = &static_chain;
3394 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
3396 tree new = remap_decl (*pvar, id);
3397 lang_hooks.dup_lang_specific_decl (new);
3398 TREE_CHAIN (new) = TREE_CHAIN (*pvar);
3399 *pvar = new;
3401 return static_chain;
3404 /* Return true if the function is allowed to be versioned.
3405 This is a guard for the versioning functionality. */
3406 bool
3407 tree_versionable_function_p (tree fndecl)
3409 if (fndecl == NULL_TREE)
3410 return false;
3411 /* ??? There are cases where a function is
3412 uninlinable but can be versioned. */
3413 if (!tree_inlinable_function_p (fndecl))
3414 return false;
3416 return true;
3419 /* Create a copy of a function's tree.
3420 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
3421 of the original function and the new copied function
3422 respectively. In case we want to replace a DECL
3423 tree with another tree while duplicating the function's
3424 body, TREE_MAP represents the mapping between these
3425 trees. If UPDATE_CLONES is set, the call_stmt fields
3426 of edges of clones of the function will be updated. */
3427 void
3428 tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
3429 bool update_clones)
3431 struct cgraph_node *old_version_node;
3432 struct cgraph_node *new_version_node;
3433 copy_body_data id;
3434 tree p;
3435 unsigned i;
3436 struct ipa_replace_map *replace_info;
3437 basic_block old_entry_block;
3438 tree t_step;
3439 tree old_current_function_decl = current_function_decl;
3441 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
3442 && TREE_CODE (new_decl) == FUNCTION_DECL);
3443 DECL_POSSIBLY_INLINED (old_decl) = 1;
3445 old_version_node = cgraph_node (old_decl);
3446 new_version_node = cgraph_node (new_decl);
3448 DECL_ARTIFICIAL (new_decl) = 1;
3449 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
3451 /* Prepare the data structures for the tree copy. */
3452 memset (&id, 0, sizeof (id));
3454 /* Generate a new name for the new version. */
3455 if (!update_clones)
3457 DECL_NAME (new_decl) = create_tmp_var_name (NULL);
3458 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
3459 SET_DECL_RTL (new_decl, NULL_RTX);
3460 id.statements_to_fold = pointer_set_create ();
3463 id.decl_map = pointer_map_create ();
3464 id.src_fn = old_decl;
3465 id.dst_fn = new_decl;
3466 id.src_node = old_version_node;
3467 id.dst_node = new_version_node;
3468 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
3470 id.copy_decl = copy_decl_no_change;
3471 id.transform_call_graph_edges
3472 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
3473 id.transform_new_cfg = true;
3474 id.transform_return_to_modify = false;
3475 id.transform_lang_insert_block = false;
3477 current_function_decl = new_decl;
3478 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
3479 (DECL_STRUCT_FUNCTION (old_decl));
3480 initialize_cfun (new_decl, old_decl,
3481 old_entry_block->count,
3482 old_entry_block->frequency);
3483 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
3485 /* Copy the function's static chain. */
3486 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
3487 if (p)
3488 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
3489 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
3490 &id);
3491 /* Copy the function's arguments. */
3492 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
3493 DECL_ARGUMENTS (new_decl) =
3494 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);
3496 /* If there's a tree_map, prepare for substitution. */
3497 if (tree_map)
3498 for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
3500 replace_info = VARRAY_GENERIC_PTR (tree_map, i);
3501 if (replace_info->replace_p)
3502 insert_decl_map (&id, replace_info->old_tree,
3503 replace_info->new_tree);
3506 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
3508 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3509 number_blocks (id.dst_fn);
3511 if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
3512 /* Add local vars. */
3513 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
3514 t_step; t_step = TREE_CHAIN (t_step))
3516 tree var = TREE_VALUE (t_step);
3517 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3518 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
3519 cfun->unexpanded_var_list);
3520 else
3521 cfun->unexpanded_var_list =
3522 tree_cons (NULL_TREE, remap_decl (var, &id),
3523 cfun->unexpanded_var_list);
3526 /* Copy the function's body. */
3527 copy_body (&id, old_entry_block->count, old_entry_block->frequency, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
3529 if (DECL_RESULT (old_decl) != NULL_TREE)
3531 tree *res_decl = &DECL_RESULT (old_decl);
3532 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
3533 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
3536 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3537 number_blocks (new_decl);
3539 /* Clean up. */
3540 pointer_map_destroy (id.decl_map);
3541 if (!update_clones)
3543 fold_marked_statements (0, id.statements_to_fold);
3544 pointer_set_destroy (id.statements_to_fold);
3545 fold_cond_expr_cond ();
3547 if (gimple_in_ssa_p (cfun))
3549 free_dominance_info (CDI_DOMINATORS);
3550 free_dominance_info (CDI_POST_DOMINATORS);
3551 if (!update_clones)
3552 delete_unreachable_blocks ();
3553 update_ssa (TODO_update_ssa);
3554 if (!update_clones)
3556 fold_cond_expr_cond ();
3557 if (need_ssa_update_p ())
3558 update_ssa (TODO_update_ssa);
3561 free_dominance_info (CDI_DOMINATORS);
3562 free_dominance_info (CDI_POST_DOMINATORS);
3563 pop_cfun ();
3564 current_function_decl = old_current_function_decl;
3565 gcc_assert (!current_function_decl
3566 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
3567 return;
3570 /* Duplicate a type, fields and all. */
3572 tree
3573 build_duplicate_type (tree type)
3575 struct copy_body_data id;
3577 memset (&id, 0, sizeof (id));
3578 id.src_fn = current_function_decl;
3579 id.dst_fn = current_function_decl;
3580 id.src_cfun = cfun;
3581 id.decl_map = pointer_map_create ();
3582 id.copy_decl = copy_decl_no_change;
3584 type = remap_type_1 (type, &id);
3586 pointer_map_destroy (id.decl_map);
3588 return type;