/* gcc/tree-inline.c */
/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "varray.h"
36 #include "hashtab.h"
37 #include "langhooks.h"
38 #include "basic-block.h"
39 #include "tree-iterator.h"
40 #include "cgraph.h"
41 #include "intl.h"
42 #include "tree-mudflap.h"
43 #include "tree-flow.h"
44 #include "function.h"
45 #include "ggc.h"
46 #include "tree-flow.h"
47 #include "diagnostic.h"
48 #include "except.h"
49 #include "debug.h"
50 #include "pointer-set.h"
51 #include "ipa-prop.h"
52 #include "value-prof.h"
53 #include "tree-pass.h"
54 #include "target.h"
55 #include "integrate.h"
/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "tree-gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   GIMPLE_MODIFY_STMTs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated, and the result is a new
   function rather than being inserted into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_body_r ().  */

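/* As an illustrative sketch of the inlining transformation described
   above (hand-written, not the output of any actual dump): inlining

     int square (int x) { return x * x; }

   at a call site "y = square (n);" conceptually yields

     x.1 = n;
     retval.2 = x.1 * x.1;
     y = retval.2;

   where x.1 is the VAR_DECL that replaces the PARM_DECL x, retval.2
   is the dedicated returned-value variable, and the RETURN_EXPR has
   become the assignment to retval.2.  The names are invented for the
   example.  */
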
/* 0 if we should not perform inlining.
   1 if we should expand function calls inline at the tree level.
   2 if we should consider *all* functions to be inline
   candidates.  */

int flag_inline_trees = 0;

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */

/* Weights that estimate_num_insns uses for heuristics in inlining.  */

eni_weights eni_inlining_weights;

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
static tree copy_generic_body (copy_body_data *);
static bool inlinable_function_p (tree);
static void remap_block (tree *, copy_body_data *);
static tree remap_decls (tree, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void add_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_no_change (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);

146 /* Insert a tree->tree mapping for ID. Despite the name suggests
147 that the trees should be variables, it is used for more than that. */
149 void
150 insert_decl_map (copy_body_data *id, tree key, tree value)
152 *pointer_map_insert (id->decl_map, key) = value;
154 /* Always insert an identity map as well. If we see this same new
155 node again, we won't want to duplicate it a second time. */
156 if (key != value)
157 *pointer_map_insert (id->decl_map, value) = value;
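/* For example, once a copy NEW has been made for OLD we record both
   OLD -> NEW and NEW -> NEW; if a later walk over the copied body
   reaches NEW itself, the lookup returns NEW unchanged instead of
   copying the node a second time.  (Illustrative restatement of the
   comment above.)  */
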
/* Construct a new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return *n;

  /* Do not set DEF_STMT yet, as the statement is not copied yet.  We
     do that in copy_bb.  */
  new = remap_decl (SSA_NAME_VAR (name), id);
  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by a variable
     during inlining: this saves us from the need to introduce a PHI
     node when the return value is only partly initialized.  */
  if ((TREE_CODE (new) == VAR_DECL || TREE_CODE (new) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      new = make_ssa_name (new, NULL);
      insert_decl_map (id, name, new);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new) = TREE_TYPE (SSA_NAME_VAR (new));
      if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining a function having an uninitialized variable,
             we might extend its lifetime (the variable might get
             reused).  This causes an ICE when we end up extending the
             lifetime of an SSA name across an abnormal edge, and it
             also increases register pressure.

             We simply initialize all uninitialized vars by 0, except
             when we are inlining into the very first BB.  We could
             avoid this for all BBs that are not within strongly
             connected regions of the CFG, but that is a bit expensive
             to test.  */
          if (id->entry_bb && is_gimple_reg (SSA_NAME_VAR (name))
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              block_stmt_iterator bsi = bsi_last (id->entry_bb);
              tree init_stmt
                = build_gimple_modify_stmt (new,
                                            fold_convert (TREE_TYPE (new),
                                                          integer_zero_node));
              bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
              SSA_NAME_DEF_STMT (new) = init_stmt;
              SSA_NAME_IS_DEFAULT_DEF (new) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new) = build_empty_stmt ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name)) == name)
                set_default_def (SSA_NAME_VAR (new), new);
            }
        }
    }
  else
    insert_decl_map (id, name, new);
  return new;
}

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->src_fn;

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);

      /* For FIELD_DECLs, do likewise for the offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
        }

      if (cfun && gimple_in_ssa_p (cfun)
          && (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
        {
          tree def = gimple_default_def (id->src_cfun, decl);
          get_var_ann (t);
          if (TREE_CODE (decl) != PARM_DECL && def)
            {
              tree map = remap_ssa_name (def, id);
              /* Watch out for RESULT_DECLs whose SSA names map
                 directly to them.  */
              if (TREE_CODE (map) == SSA_NAME
                  && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (map)))
                set_default_def (t, map);
            }
          add_referenced_var (t);
        }
      return t;
    }

  return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                         TYPE_MODE (type),
                                         TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                           TYPE_MODE (type),
                                           TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else
    new = copy_node (type);

  insert_decl_map (id, type, new);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new) = t;
      TYPE_NEXT_VARIANT (new) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new;
    }
  else
    {
      TYPE_MAIN_VARIANT (new) = new;
      TYPE_NEXT_VARIANT (new) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new) = NULL;
  TYPE_REFERENCE_TO (new) = NULL;

  switch (TREE_CODE (new))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
      return new;

    case FUNCTION_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
      return new;

    case ARRAY_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);

  return new;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  return remap_type_1 (type, id);
}

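/* Illustrative example (invented names): for a VLA parameter such as
   in "void f (int n, int a[n])", the type of A is variably modified --
   its size expression refers to the PARM_DECL N of the source
   function -- so it must pass through remap_type_1 to get a new type
   node whose size uses the remapped N.  An ordinary type like "int"
   is not variably modified and simply maps to itself above.  */
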
static tree
remap_decls (tree decls, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      /* We cannot remap local static declarations, since we can't
         duplicate them without breaking the one-decl rule.  Link the
         original declarations into unexpanded_var_list instead.  */
      if (!auto_var_in_fn_p (old_var, id->src_fn)
          && !DECL_EXTERNAL (old_var))
        {
          cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
                                                 cfun->unexpanded_var_list);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */
      if (!new_var || new_var == id->retvar)
        ;
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);

  fn = id->dst_fn;

  if (id->transform_lang_insert_block)
    lang_hooks.decls.insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new = block;

  if (!block)
    return NULL;

  remap_block (&new, id);
  gcc_assert (new != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    add_lexical_block (new, remap_blocks (t, id));
  return new;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new;

  new = alloc_stmt_list ();
  ni = tsi_start (new);
  oi = tsi_start (*tp);
  *tp = new;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
}

/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained GIMPLE_MODIFY_STMT.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == GIMPLE_MODIFY_STMT)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             GIMPLE_MODIFY_STMT hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == GIMPLE_MODIFY_STMT
          && GIMPLE_STMT_OPERAND (*tp, 0) == GIMPLE_STMT_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (GIMPLE_STMT_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = GIMPLE_STMT_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
                {
                  *tp = build_empty_stmt ();
                  return copy_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              new = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new);
              if (! *tp)
                {
                  if (TREE_CODE (new) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (type, new);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into
         referenced vars.  */
      if (gimple_in_ssa_p (cfun) && TREE_CODE (*tp) == VAR_DECL)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed
         block.  When inlining we want EXPRs without a block to appear
         in the block of the function call.  */
      if (EXPR_P (*tp) || GIMPLE_STMT_P (*tp))
        {
          new_block = id->block;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n);
              new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
        TREE_OPERAND (*tp, 0) =
          build_int_cst
            (NULL_TREE,
             id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      if (!GIMPLE_TUPLE_P (*tp) && TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = TREE_INVARIANT (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);
          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !TREE_INVARIANT (*tp))
            id->regimplify = true;
          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

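/* A sketch of the INDIRECT_REF case above (invented names): inlining

     void f (int *p) { *p = 0; }

   at the call "f (&x)" maps P to "&x", so the copied "*p" momentarily
   becomes "*&x", which the code above folds back to plain "x" rather
   than leaking the *& pair into the IL.  */
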
/* Copy basic block, scale profile accordingly.  Edges will be taken
   care of later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, int count_scale)
{
  block_stmt_iterator bsi, copy_bsi;
  basic_block copy_basic_block;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     are of only minor importance for driving canonicalize_loop_headers.  */
  copy_basic_block->frequency = ((gcov_type)bb->frequency
                                 * frequency_scale / REG_BR_PROB_BASE);
  if (copy_basic_block->frequency > BB_FREQ_MAX)
    copy_basic_block->frequency = BB_FREQ_MAX;
  copy_bsi = bsi_start (copy_basic_block);

  for (bsi = bsi_start (bb);
       !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      tree orig_stmt = stmt;

      id->regimplify = false;
      walk_tree (&stmt, copy_body_r, id, NULL);

      /* A RETURN_EXPR might have been removed; this is signalled by
         the stmt pointer becoming NULL.  */
      if (stmt)
        {
          tree call, decl;

          gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);

          /* With return slot optimization we can end up with
             non-gimple (foo *)&this->m, fix that here.  */
          if ((TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
               && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
               && !is_gimple_val (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0)))
              || id->regimplify)
            gimplify_stmt (&stmt);

          bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);

          /* Process the new statement.  gimplify_stmt may have turned
             the statement into multiple statements; we need to process
             all of them.  */
          while (!bsi_end_p (copy_bsi))
            {
              tree *stmtp = bsi_stmt_ptr (copy_bsi);
              tree stmt = *stmtp;
              call = get_call_expr_in (stmt);

              if (call && CALL_EXPR_VA_ARG_PACK (call) && id->call_expr)
                {
                  /* __builtin_va_arg_pack () should be replaced by
                     all arguments corresponding to ... in the caller.  */
                  tree p, *argarray, new_call, *call_ptr;
                  int nargs = call_expr_nargs (id->call_expr);

                  for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                    nargs--;

                  argarray = (tree *) alloca ((nargs + call_expr_nargs (call))
                                              * sizeof (tree));

                  memcpy (argarray, CALL_EXPR_ARGP (call),
                          call_expr_nargs (call) * sizeof (*argarray));
                  memcpy (argarray + call_expr_nargs (call),
                          CALL_EXPR_ARGP (id->call_expr)
                          + (call_expr_nargs (id->call_expr) - nargs),
                          nargs * sizeof (*argarray));

                  new_call = build_call_array (TREE_TYPE (call),
                                               CALL_EXPR_FN (call),
                                               nargs + call_expr_nargs (call),
                                               argarray);
                  /* Copy all CALL_EXPR flags, locus and block, except
                     the CALL_EXPR_VA_ARG_PACK flag.  */
                  CALL_EXPR_STATIC_CHAIN (new_call)
                    = CALL_EXPR_STATIC_CHAIN (call);
                  CALL_EXPR_TAILCALL (new_call) = CALL_EXPR_TAILCALL (call);
                  CALL_EXPR_RETURN_SLOT_OPT (new_call)
                    = CALL_EXPR_RETURN_SLOT_OPT (call);
                  CALL_FROM_THUNK_P (new_call) = CALL_FROM_THUNK_P (call);
                  CALL_CANNOT_INLINE_P (new_call)
                    = CALL_CANNOT_INLINE_P (call);
                  TREE_NOTHROW (new_call) = TREE_NOTHROW (call);
                  SET_EXPR_LOCUS (new_call, EXPR_LOCUS (call));
                  TREE_BLOCK (new_call) = TREE_BLOCK (call);

                  call_ptr = stmtp;
                  if (TREE_CODE (*call_ptr) == GIMPLE_MODIFY_STMT)
                    call_ptr = &GIMPLE_STMT_OPERAND (*call_ptr, 1);
                  if (TREE_CODE (*call_ptr) == WITH_SIZE_EXPR)
                    call_ptr = &TREE_OPERAND (*call_ptr, 0);
                  gcc_assert (*call_ptr == call);
                  if (call_ptr == stmtp)
                    {
                      bsi_replace (&copy_bsi, new_call, true);
                      stmtp = bsi_stmt_ptr (copy_bsi);
                      stmt = *stmtp;
                    }
                  else
                    {
                      *call_ptr = new_call;
                      stmt = *stmtp;
                      update_stmt (stmt);
                    }
                }
              else if (call
                       && id->call_expr
                       && (decl = get_callee_fndecl (call))
                       && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
                       && DECL_FUNCTION_CODE (decl)
                          == BUILT_IN_VA_ARG_PACK_LEN)
                {
                  /* __builtin_va_arg_pack_len () should be replaced by
                     the number of anonymous arguments.  */
                  int nargs = call_expr_nargs (id->call_expr);
                  tree count, *call_ptr, p;

                  for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                    nargs--;

                  count = build_int_cst (integer_type_node, nargs);
                  call_ptr = stmtp;
                  if (TREE_CODE (*call_ptr) == GIMPLE_MODIFY_STMT)
                    call_ptr = &GIMPLE_STMT_OPERAND (*call_ptr, 1);
                  if (TREE_CODE (*call_ptr) == WITH_SIZE_EXPR)
                    call_ptr = &TREE_OPERAND (*call_ptr, 0);
                  gcc_assert (*call_ptr == call && call_ptr != stmtp);
                  *call_ptr = count;
                  stmt = *stmtp;
                  update_stmt (stmt);
                  call = NULL_TREE;
                }

              /* Statements produced by inlining can be unfolded, especially
                 when we constant propagated some operands.  We can't fold
                 them right now for two reasons:
                 1) folding requires SSA_NAME_DEF_STMTs to be correct
                 2) we can't change function calls to builtins.
                 So we just mark the statement for later folding.  We mark
                 all new statements, instead of just the statements changed
                 by some nontrivial substitution, so even statements made
                 foldable indirectly are updated.  If this turns out to be
                 expensive, copy_body can be told to watch for nontrivial
                 changes.  */
              if (id->statements_to_fold)
                pointer_set_insert (id->statements_to_fold, stmt);
              /* We're duplicating a CALL_EXPR.  Find any corresponding
                 callgraph edges and update or duplicate them.  */
              if (call && (decl = get_callee_fndecl (call)))
                {
                  struct cgraph_node *node;
                  struct cgraph_edge *edge;

                  switch (id->transform_call_graph_edges)
                    {
                    case CB_CGE_DUPLICATE:
                      edge = cgraph_edge (id->src_node, orig_stmt);
                      if (edge)
                        cgraph_clone_edge (edge, id->dst_node, stmt,
                                           REG_BR_PROB_BASE, 1, edge->frequency, true);
                      break;

                    case CB_CGE_MOVE_CLONES:
                      for (node = id->dst_node->next_clone;
                           node;
                           node = node->next_clone)
                        {
                          edge = cgraph_edge (node, orig_stmt);
                          gcc_assert (edge);
                          cgraph_set_call_stmt (edge, stmt);
                        }
                      /* FALLTHRU */

                    case CB_CGE_MOVE:
                      edge = cgraph_edge (id->dst_node, orig_stmt);
                      if (edge)
                        cgraph_set_call_stmt (edge, stmt);
                      break;

                    default:
                      gcc_unreachable ();
                    }
                }
              /* If you think we can abort here, you are wrong.
                 There is no region 0 in tree land.  */
              gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt)
                          != 0);

              if (tree_could_throw_p (stmt)
                  /* When we are cloning for inlining, we are supposed to
                     construct a clone that calls precisely the same functions
                     as the original.  However IPA optimizers might earlier
                     have proved some function calls non-trapping, which can
                     render some basic blocks dead and eventually unreachable.

                     We can't update SSA with unreachable blocks in the CFG,
                     and thus we prevent the scenario by preserving even the
                     "dead" eh edges until the point they are later removed
                     by the fixup_cfg pass.  */
                  || (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
                      && lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) > 0))
                {
                  int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);

                  /* Add an entry for the copied tree in the EH hashtable.
                     When cloning or versioning, use the hashtable in
                     cfun, and just copy the EH number.  When inlining, use the
                     hashtable in the caller, and adjust the region number.  */
                  if (region > 0)
                    add_stmt_to_eh_region (stmt, region + id->eh_region_offset);

                  /* If this tree doesn't have a region associated with it,
                     and there is a "current region,"
                     then associate this tree with the current region
                     and add edges associated with this region.  */
                  if ((lookup_stmt_eh_region_fn (id->src_cfun,
                                                 orig_stmt) <= 0
                       && id->eh_region > 0)
                      && tree_could_throw_p (stmt))
                    add_stmt_to_eh_region (stmt, id->eh_region);
                }
              if (gimple_in_ssa_p (cfun))
                {
                  ssa_op_iter i;
                  tree def;

                  find_new_referenced_vars (bsi_stmt_ptr (copy_bsi));
                  FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                    if (TREE_CODE (def) == SSA_NAME)
                      SSA_NAME_DEF_STMT (def) = stmt;
                }
              bsi_next (&copy_bsi);
            }
          copy_bsi = bsi_last (copy_basic_block);
        }
    }
  return copy_basic_block;
}

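/* A sketch of the __builtin_va_arg_pack handling above, modeled on the
   example in the GCC manual (assuming an always_inline varargs
   function):

     static inline __attribute__((always_inline)) int
     my_fprintf (FILE *f, const char *fmt, ...)
     {
       return fprintf (f, fmt, __builtin_va_arg_pack ());
     }

   Inlining the call "my_fprintf (stderr, "%d", 5)" replaces
   __builtin_va_arg_pack () with the anonymous argument 5, producing
   "fprintf (stderr, "%d", 5)"; __builtin_va_arg_pack_len () in the
   same body would be replaced by the constant 1.  */
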
/* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
   form is quite easy, since the dominator relationship for old basic blocks
   does not change.

   There is however an exception: inlining might change the dominator
   relation across EH edges leading from basic blocks within the inlined
   function to landing pads in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges;
   we might then want to change the way we build the CFG pre-inlining
   to include all the possible edges.  */
static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
                                  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
        || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
        tree phi;

        gcc_assert (e->flags & EDGE_ABNORMAL);
        if (!nonlocal_goto)
          gcc_assert (e->flags & EDGE_EH);
        if (!can_throw)
          gcc_assert (!(e->flags & EDGE_EH));
        for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
          {
            edge re;

            /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
            gcc_assert (!e->dest->aux);

            gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
                        (PHI_RESULT (phi)));

            if (!is_gimple_reg (PHI_RESULT (phi)))
              {
                mark_sym_for_renaming
                  (SSA_NAME_VAR (PHI_RESULT (phi)));
                continue;
              }

            re = find_edge (ret_bb, e->dest);
            gcc_assert (re);
            gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
                        == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

            SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
                     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
          }
      }
}

/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Assume the aux pointers point to the copies of each BB.  */
static void
copy_edges_for_bb (basic_block bb, int count_scale, basic_block ret_bb)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  block_stmt_iterator bsi;
  int flags;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
        edge new;

        flags = old_edge->flags;

        /* Return edges do get a FALLTHRU flag when they get inlined.  */
        if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
            && old_edge->dest->aux != EXIT_BLOCK_PTR)
          flags |= EDGE_FALLTHRU;
        new = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
        new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
        new->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;

  for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
    {
      tree copy_stmt;
      bool can_throw, nonlocal_goto;

      copy_stmt = bsi_stmt (bsi);
      update_stmt (copy_stmt);
      if (gimple_in_ssa_p (cfun))
        mark_symbols_for_renaming (copy_stmt);
      /* Do this before the possible split_block.  */
      bsi_next (&bsi);

      /* If this tree could throw an exception, there are two
         cases where we need to add abnormal edge(s): the
         tree wasn't in a region and there is a "current
         region" in the caller; or the original tree had
         EH edges.  In both cases split the block after the tree,
         and add abnormal edge(s) as needed; we need both
         those from the callee and the caller.
         We check whether the copy can throw, because the const
         propagation can change an INDIRECT_REF which throws
         into a COMPONENT_REF which doesn't.  If the copy
         can throw, the original could also throw.  */

      can_throw = tree_can_throw_internal (copy_stmt);
      nonlocal_goto = tree_can_make_abnormal_goto (copy_stmt);

      if (can_throw || nonlocal_goto)
        {
          if (!bsi_end_p (bsi))
            /* Note that bb's predecessor edges aren't necessarily
               right at this point; split_block doesn't care.  */
            {
              edge e = split_block (new_bb, copy_stmt);

              new_bb = e->dest;
              new_bb->aux = e->src->aux;
              bsi = bsi_start (new_bb);
            }

          if (can_throw)
            make_eh_edges (copy_stmt);

          if (nonlocal_goto)
            make_abnormal_goto_edges (bb_for_stmt (copy_stmt), true);
        }

      if ((can_throw || nonlocal_goto)
          && gimple_in_ssa_p (cfun))
        update_ssa_across_abnormal_edges (bb_for_stmt (copy_stmt), ret_bb,
                                          can_throw, nonlocal_goto);
    }
}

/* Copy the PHIs.  All blocks and edges are copied, some blocks
   were possibly split, and new outgoing EH edges inserted.
   BB points to the block of the original function and the AUX pointers
   link the original and newly copied blocks.  */

static void
copy_phis_for_bb (basic_block bb, copy_body_data *id)
{
  basic_block new_bb = bb->aux;
  edge_iterator ei;
  tree phi;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    {
      tree res = PHI_RESULT (phi);
      tree new_res = res;
      tree new_phi;
      edge new_edge;

      if (is_gimple_reg (res))
        {
          walk_tree (&new_res, copy_body_r, id, NULL);
          SSA_NAME_DEF_STMT (new_res)
            = new_phi = create_phi_node (new_res, new_bb);
          FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
            {
              edge old_edge = find_edge (new_edge->src->aux, bb);
              tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
              tree new_arg = arg;

              walk_tree (&new_arg, copy_body_r, id, NULL);
              gcc_assert (new_arg);
              /* With return slot optimization we can end up with
                 non-gimple (foo *)&this->m, fix that here.  */
              if (TREE_CODE (new_arg) != SSA_NAME
                  && TREE_CODE (new_arg) != FUNCTION_DECL
                  && !is_gimple_val (new_arg))
                {
                  tree stmts = NULL_TREE;
                  new_arg = force_gimple_operand (new_arg, &stmts,
                                                  true, NULL);
                  bsi_insert_on_edge_immediate (new_edge, stmts);
                }
              add_phi_arg (new_phi, new_arg, new_edge);
            }
        }
    }
}

/* Wrapper for remap_decl so it can be used as a callback.  */
static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, (copy_body_data *) data);
}

/* Build struct function and associated datastructures for the new clone
   NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  */

static void
initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
                 int frequency)
{
  struct function *new_cfun
    = (struct function *) ggc_alloc_cleared (sizeof (struct function));
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  int count_scale, frequency_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
                       / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
  else
    frequency_scale = count_scale;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();
  *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
  new_cfun->funcdef_no = get_next_funcdef_no ();
  VALUE_HISTOGRAMS (new_cfun) = NULL;
  new_cfun->unexpanded_var_list = NULL;
  new_cfun->cfg = NULL;
  new_cfun->decl = new_fndecl /*= copy_node (callee_fndecl)*/;
  DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
  push_cfun (new_cfun);
  init_empty_tree_cfg ();

  ENTRY_BLOCK_PTR->count =
    (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  ENTRY_BLOCK_PTR->frequency =
    (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
     frequency_scale / REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR->count =
    (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR->frequency =
    (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
     frequency_scale / REG_BR_PROB_BASE);
  if (src_cfun->eh)
    init_eh_for_function ();

  if (src_cfun->gimple_df)
    {
      init_tree_ssa ();
      cfun->gimple_df->in_ssa_p = true;
      init_ssa_operands ();
    }
  pop_cfun ();
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
               basic_block entry_block_map, basic_block exit_block_map)
{
  tree callee_fndecl = id->src_fn;
  /* Original cfun for the callee, doesn't change.  */
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  struct function *cfun_to_copy;
  basic_block bb;
  tree new_fndecl = NULL;
  int count_scale, frequency_scale;
  int last;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
                       / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
  else
    frequency_scale = count_scale;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
              (DECL_STRUCT_FUNCTION (callee_fndecl)));

  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    {
      id->eh_region_offset
        = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
                                0, id->eh_region);
    }
  /* Use aux pointers to map the original blocks to copies.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    {
      basic_block new = copy_bb (id, bb, frequency_scale, count_scale);
      bb->aux = new;
      new->aux = bb;
    }

  last = last_basic_block;
  /* Now that we've duplicated the blocks, duplicate their edges.  */
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    copy_edges_for_bb (bb, count_scale, exit_block_map);
  if (gimple_in_ssa_p (cfun))
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      copy_phis_for_bb (bb, id);
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    {
      ((basic_block)bb->aux)->aux = NULL;
      bb->aux = NULL;
    }
  /* Zero out AUX fields of blocks newly created during EH edge
     insertion.  */
  for (; last < last_basic_block; last++)
    BASIC_BLOCK (last)->aux = NULL;
  entry_block_map->aux = NULL;
  exit_block_map->aux = NULL;

  return new_fndecl;
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_generic_body (copy_body_data *id)
{
  tree body;
  tree fndecl = id->src_fn;

  body = DECL_SAVED_TREE (fndecl);
  walk_tree (&body, copy_body_r, id, NULL);

  return body;
}

static tree
copy_body (copy_body_data *id, gcov_type count, int frequency,
           basic_block entry_block_map, basic_block exit_block_map)
{
  tree fndecl = id->src_fn;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);

  return body;
}

/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && auto_var_in_fn_p (var, fn);
}

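/* This predicate exists for the ADDR_EXPR check in setup_one_parameter
   below: when the inlined function is (directly or mutually)
   recursive, an argument like "&local" whose base is an automatic
   variable of the inlined function itself must not be
   constant-propagated into the copied body, since LOCAL is remapped
   during inlining and the propagated address could name the wrong
   instance.  (Illustrative reading of the check; see the comment at
   the use site.)  */
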
static void
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
                     basic_block bb, tree *vars)
{
  tree init_stmt;
  tree var;
  tree var_sub;
  tree rhs = value;
  tree def = (gimple_in_ssa_p (cfun)
              ? gimple_default_def (id->src_cfun, p) : NULL);

  if (value
      && value != error_mark_node
      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    {
      if (fold_convertible_p (TREE_TYPE (p), value))
        rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
      else
        /* ??? For valid (GIMPLE) programs we should not end up here.
           Still if something has gone wrong and we end up with truly
           mismatched types here, fall back to using a VIEW_CONVERT_EXPR
           to not leak invalid GIMPLE to the following passes.  */
        rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
    }

  /* If the parameter is never assigned to and has no SSA_NAMEs created,
     we may not need to create a new variable here at all.  Instead, we may
     be able to just use the argument value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
         invalid sharing when the operand is not really constant.
         It is not a big deal to prohibit constant propagation here,
         as we will constant propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
          && useless_type_conversion_p (TREE_TYPE (p),
                                        TREE_TYPE (value))
          /* We have to be very careful about ADDR_EXPR.  Make sure
             the base variable isn't a local variable of the inlined
             function, e.g., when doing recursive inlining, direct or
             mutually-recursive or whatever, which is why we don't
             just test whether fn == current_function_decl.  */
          && ! self_inlining_addr_expr (value, fn))
        {
          insert_decl_map (id, p, value);
          return;
        }
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);
  if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
    {
      get_var_ann (var);
      add_referenced_var (var);
    }

  /* See if the frontend wants to pass this by invisible reference.  If
     so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
     replace uses of the PARM_DECL with dereferences.  */
  if (TREE_TYPE (var) != TREE_TYPE (p)
      && POINTER_TYPE_P (TREE_TYPE (var))
      && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
    {
      insert_decl_map (id, var, var);
      var_sub = build_fold_indirect_ref (var);
    }
  else
    var_sub = var;

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var_sub);

  /* Declare this new variable.  */
  TREE_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that a TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to the function.

     We need to construct a map for the variable anyway, as it might be
     used in different SSA names when the parameter is set in the function.

     FIXME: This usually kills the last connection in between the inlined
     function parameter and the actual value in debug info.  Can we do
     better here?  If we just inserted the statement, copy propagation
     would kill it anyway as it always did in older versions of GCC.

     We might want to introduce a notion that a single SSA_NAME might
     represent multiple variables for purposes of debugging.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (TREE_CODE (rhs) == SSA_NAME
          || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return;
    }

  /* If the value of the argument is never used, don't care about
     initializing it.  */
  if (gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
    {
      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
      return;
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      block_stmt_iterator bsi = bsi_last (bb);

      if (rhs == error_mark_node)
        {
          insert_decl_map (id, p, var_sub);
          return;
        }

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* We want to use GIMPLE_MODIFY_STMT, not INIT_EXPR here so that we
         keep our trees in gimple form.  */
      if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
        {
          def = remap_ssa_name (def, id);
          init_stmt = build_gimple_modify_stmt (def, rhs);
          SSA_NAME_DEF_STMT (def) = init_stmt;
          SSA_NAME_IS_DEFAULT_DEF (def) = 0;
          set_default_def (var, NULL);
        }
      else
        init_stmt = build_gimple_modify_stmt (var, rhs);

      /* If we did not create a gimple value and we did not create a gimple
         cast of a gimple value, then we will need to gimplify INIT_STMTS
         at the end.  Note that is_gimple_cast only checks the outer
         tree code, not its operand.  Thus the explicit check that its
         operand is a gimple value.  */
      if ((!is_gimple_val (rhs)
           && (!is_gimple_cast (rhs)
               || !is_gimple_val (TREE_OPERAND (rhs, 0))))
          || !is_gimple_reg (var))
        {
          tree_stmt_iterator i;

          push_gimplify_context ();
          gimplify_stmt (&init_stmt);
          if (gimple_in_ssa_p (cfun)
              && init_stmt && TREE_CODE (init_stmt) == STATEMENT_LIST)
            {
              /* The replacement can expose previously unreferenced
                 variables.  */
              for (i = tsi_start (init_stmt); !tsi_end_p (i); tsi_next (&i))
                find_new_referenced_vars (tsi_stmt_ptr (i));
            }
          pop_gimplify_context (NULL);
        }

      /* If VAR represents a zero-sized variable, it's possible that the
         assignment statement may result in no gimple statements.  */
      if (init_stmt)
        bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
      if (gimple_in_ssa_p (cfun))
        for (;!bsi_end_p (bsi); bsi_next (&bsi))
          mark_symbols_for_renaming (bsi_stmt (bsi));
    }
}

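/* A sketch of the overall effect (invented names): for a parameter
   "int x" passed the argument "i + 1", setup_one_parameter creates a
   VAR_DECL x.N, maps the PARM_DECL to it, and emits

     x.N = i + 1;

   at the head of the inlined body; whereas for a readonly,
   non-addressed parameter passed a constant, the constant is mapped
   directly and no variable or assignment is created at all.  */
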
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the CALL_EXPR EXP.  */

static void
initialize_inlined_parameters (copy_body_data *id, tree exp,
                               tree fn, basic_block bb)
{
  tree parms;
  tree a;
  tree p;
  tree vars = NULL_TREE;
  call_expr_arg_iterator iter;
  tree static_chain = CALL_EXPR_STATIC_CHAIN (exp);

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = first_call_expr_arg (exp, &iter); p;
       a = next_call_expr_arg (&iter), p = TREE_CHAIN (p))
    setup_one_parameter (id, p, a, fn, bb, &vars);

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}

1662 /* Declare a return variable to replace the RESULT_DECL for the
1663 function we are calling. An appropriate DECL_STMT is returned.
1664 The USE_STMT is filled to contain a use of the declaration to
1665 indicate the return value of the function.
1667 RETURN_SLOT, if non-null is place where to store the result. It
1668 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
1669 was the LHS of the GIMPLE_MODIFY_STMT to which this call is the RHS.
1671 The return value is a (possibly null) value that is the result of the
1672 function as seen by the callee. *USE_P is a (possibly null) value that
1673 holds the result as seen by the caller. */
1675 static tree
1676 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
1677 tree *use_p)
1679 tree callee = id->src_fn;
1680 tree caller = id->dst_fn;
1681 tree result = DECL_RESULT (callee);
1682 tree callee_type = TREE_TYPE (result);
1683 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
1684 tree var, use;
1686 /* We don't need to do anything for functions that don't return
1687 anything. */
1688 if (!result || VOID_TYPE_P (callee_type))
1690 *use_p = NULL_TREE;
1691 return NULL_TREE;
1694 /* If there was a return slot, then the return value is the
1695 dereferenced address of that object. */
1696 if (return_slot)
1698 /* The front end shouldn't have used both return_slot and
1699 a modify expression. */
1700 gcc_assert (!modify_dest);
1701 if (DECL_BY_REFERENCE (result))
1703 tree return_slot_addr = build_fold_addr_expr (return_slot);
1704 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
1706 /* We are going to construct *&return_slot and we can't do that
1707 for variables believed to be not addressable.
1709 FIXME: This check possibly can match, because values returned
1710 via return slot optimization are not believed to have address
1711 taken by alias analysis. */
1712 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
1713 if (gimple_in_ssa_p (cfun))
1715 HOST_WIDE_INT bitsize;
1716 HOST_WIDE_INT bitpos;
1717 tree offset;
1718 enum machine_mode mode;
1719 int unsignedp;
1720 int volatilep;
1721 tree base;
1722 base = get_inner_reference (return_slot, &bitsize, &bitpos,
1723 &offset,
1724 &mode, &unsignedp, &volatilep,
1725 false);
1726 if (TREE_CODE (base) == INDIRECT_REF)
1727 base = TREE_OPERAND (base, 0);
1728 if (TREE_CODE (base) == SSA_NAME)
1729 base = SSA_NAME_VAR (base);
1730 mark_sym_for_renaming (base);
1732 var = return_slot_addr;
1734 else
1736 var = return_slot;
1737 gcc_assert (TREE_CODE (var) != SSA_NAME);
1738 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
1740 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1741 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1742 && !DECL_GIMPLE_REG_P (result)
1743 && DECL_P (var))
1744 DECL_GIMPLE_REG_P (var) = 0;
1745 use = NULL;
1746 goto done;
1749 /* All types requiring non-trivial constructors should have been handled. */
1750 gcc_assert (!TREE_ADDRESSABLE (callee_type));
1752 /* Attempt to avoid creating a new temporary variable. */
1753 if (modify_dest
1754 && TREE_CODE (modify_dest) != SSA_NAME)
1756 bool use_it = false;
1758 /* We can't use MODIFY_DEST if there's type promotion involved. */
1759 if (!useless_type_conversion_p (callee_type, caller_type))
1760 use_it = false;
1762 /* ??? If we're assigning to a variable sized type, then we must
1763 reuse the destination variable, because we've no good way to
1764 create variable sized temporaries at this point. */
1765 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
1766 use_it = true;
1768 /* If the callee cannot possibly modify MODIFY_DEST, then we can
1769 reuse it as the result of the call directly. Don't do this if
1770 it would promote MODIFY_DEST to addressable. */
1771 else if (TREE_ADDRESSABLE (result))
1772 use_it = false;
1773 else
1775 tree base_m = get_base_address (modify_dest);
1777 /* If the base isn't a decl, then it's a pointer, and we don't
1778 know where that's going to go. */
1779 if (!DECL_P (base_m))
1780 use_it = false;
1781 else if (is_global_var (base_m))
1782 use_it = false;
1783 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1784 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1785 && !DECL_GIMPLE_REG_P (result)
1786 && DECL_GIMPLE_REG_P (base_m))
1787 use_it = false;
1788 else if (!TREE_ADDRESSABLE (base_m))
1789 use_it = true;
1792 if (use_it)
1794 var = modify_dest;
1795 use = NULL;
1796 goto done;
1800 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
1802 var = copy_result_decl_to_var (result, id);
1803 if (gimple_in_ssa_p (cfun))
1805 get_var_ann (var);
1806 add_referenced_var (var);
1809 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
1810 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
1811 = tree_cons (NULL_TREE, var,
1812 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
1814 /* Do not have the rest of GCC warn about this variable as it should
1815 not be visible to the user. */
1816 TREE_NO_WARNING (var) = 1;
1818 declare_inline_vars (id->block, var);
1820 /* Build the use expr. If the return type of the function was
1821 promoted, convert it back to the expected type. */
1822 use = var;
1823 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
1824 use = fold_convert (caller_type, var);
1826 STRIP_USELESS_TYPE_CONVERSION (use);
1828 if (DECL_BY_REFERENCE (result))
1829 var = build_fold_addr_expr (var);
1831 done:
1832 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
1833 way, when the RESULT_DECL is encountered, it will be
1834 automatically replaced by the VAR_DECL. */
1835 insert_decl_map (id, result, var);
1837 /* Remember this so we can ignore it in remap_decls. */
1838 id->retvar = var;
1840 *use_p = use;
1841 return var;
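/* Illustrative sketch (hypothetical caller code, not from this file) of
   the three outcomes distinguished above, for a call to some FN:

     struct big b = fn ();   -- return_slot: the callee constructs
                                directly into 'b' (*use_p stays NULL)
     x = fn ();              -- modify_dest: 'x' itself may be reused
                                as the return variable when safe
     fn ();                  -- otherwise a fresh VAR_DECL is created
                                to stand in for the RESULT_DECL.  */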
1844 /* Returns nonzero if a function can be inlined as a tree. */
1846 bool
1847 tree_inlinable_function_p (tree fn)
1849 return inlinable_function_p (fn);
1852 static const char *inline_forbidden_reason;
1854 static tree
1855 inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
1856 void *fnp)
1858 tree node = *nodep;
1859 tree fn = (tree) fnp;
1860 tree t;
1862 switch (TREE_CODE (node))
1864 case CALL_EXPR:
1865 /* Refuse to inline an alloca call unless the user explicitly forced
1866 it, as this may change the program's memory overhead drastically
1867 when the function using alloca is called in a loop. In GCC as
1868 present in SPEC2000, inlining into schedule_block caused it to
1869 require 2GB of RAM instead of 256MB. */
1870 if (alloca_call_p (node)
1871 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1873 inline_forbidden_reason
1874 = G_("function %q+F can never be inlined because it uses "
1875 "alloca (override using the always_inline attribute)");
1876 return node;
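/* A sketch of the problem (hypothetical code; 'use' is a placeholder):
   inlining

     void push (size_t n) { int *p = alloca (n * sizeof (int)); use (p); }

   into a loop body means each iteration allocates a fresh block on the
   caller's stack frame, and nothing is released until the caller itself
   returns.  */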
1878 t = get_callee_fndecl (node);
1879 if (! t)
1880 break;
1882 /* We cannot inline functions that call setjmp. */
1883 if (setjmp_call_p (t))
1885 inline_forbidden_reason
1886 = G_("function %q+F can never be inlined because it uses setjmp");
1887 return node;
1890 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
1891 switch (DECL_FUNCTION_CODE (t))
1893 /* We cannot inline functions that take a variable number of
1894 arguments. */
1895 case BUILT_IN_VA_START:
1896 case BUILT_IN_STDARG_START:
1897 case BUILT_IN_NEXT_ARG:
1898 case BUILT_IN_VA_END:
1899 inline_forbidden_reason
1900 = G_("function %q+F can never be inlined because it "
1901 "uses variable argument lists");
1902 return node;
1904 case BUILT_IN_LONGJMP:
1905 /* We can't inline functions that call __builtin_longjmp at
1906 all. The non-local goto machinery really requires the
1907 destination be in a different function. If we allow the
1908 function calling __builtin_longjmp to be inlined into the
1909 function calling __builtin_setjmp, Things will Go Awry. */
1910 inline_forbidden_reason
1911 = G_("function %q+F can never be inlined because "
1912 "it uses setjmp-longjmp exception handling");
1913 return node;
1915 case BUILT_IN_NONLOCAL_GOTO:
1916 /* Similarly. */
1917 inline_forbidden_reason
1918 = G_("function %q+F can never be inlined because "
1919 "it uses non-local goto");
1920 return node;
1922 case BUILT_IN_RETURN:
1923 case BUILT_IN_APPLY_ARGS:
1924 /* If a __builtin_apply_args caller were inlined,
1925 it would be saving arguments of the function it has
1926 been inlined into. Similarly, __builtin_return would
1927 return from the function it has been inlined into. */
1928 inline_forbidden_reason
1929 = G_("function %q+F can never be inlined because "
1930 "it uses __builtin_return or __builtin_apply_args");
1931 return node;
1933 default:
1934 break;
1936 break;
1938 case GOTO_EXPR:
1939 t = TREE_OPERAND (node, 0);
1941 /* We will not inline a function which uses computed goto. The
1942 addresses of its local labels, which may be tucked into
1943 global storage, are of course not constant across
1944 instantiations, which causes unexpected behavior. */
1945 if (TREE_CODE (t) != LABEL_DECL)
1947 inline_forbidden_reason
1948 = G_("function %q+F can never be inlined "
1949 "because it contains a computed goto");
1950 return node;
1952 break;
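/* Hypothetical example of a function rejected here:

     void *dispatch (int op)
     {
       static void *tab[] = { &&add, &&sub };
       goto *tab[op];
     add: return tab[0];
     sub: return tab[1];
     }

   The label addresses stored in 'tab' refer to one instantiation of the
   body and would be wrong in any inlined copy.  */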
1954 case LABEL_EXPR:
1955 t = TREE_OPERAND (node, 0);
1956 if (DECL_NONLOCAL (t))
1958 /* We cannot inline a function that receives a non-local goto
1959 because we cannot remap the destination label used in the
1960 function that is performing the non-local goto. */
1961 inline_forbidden_reason
1962 = G_("function %q+F can never be inlined "
1963 "because it receives a non-local goto");
1964 return node;
1966 break;
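/* Hypothetical example using GNU nested functions ('walk' is a
   placeholder):

     void outer (void)
     {
       __label__ out;
       void inner (void) { goto out; }
       walk (inner);
     out:;
     }

   The label 'out' is DECL_NONLOCAL in outer, so outer cannot be
   inlined: we could not remap the destination of inner's goto.  */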
1968 case RECORD_TYPE:
1969 case UNION_TYPE:
1970 /* We cannot inline a function of the form
1972 void F (int i) { struct S { int ar[i]; } s; }
1974 Attempting to do so produces a catch-22.
1975 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1976 UNION_TYPE nodes, then it goes into infinite recursion on a
1977 structure containing a pointer to its own type. If it doesn't,
1978 then the type node for S doesn't get adjusted properly when
1979 F is inlined.
1981 ??? This is likely no longer true, but it's too late in the 4.0
1982 cycle to try to find out. This should be checked for 4.1. */
1983 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1984 if (variably_modified_type_p (TREE_TYPE (t), NULL))
1986 inline_forbidden_reason
1987 = G_("function %q+F can never be inlined "
1988 "because it uses variable sized variables");
1989 return node;
1992 default:
1993 break;
1996 return NULL_TREE;
1999 static tree
2000 inline_forbidden_p_2 (tree *nodep, int *walk_subtrees,
2001 void *fnp)
2003 tree node = *nodep;
2004 tree fn = (tree) fnp;
2006 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2008 inline_forbidden_reason
2009 = G_("function %q+F can never be inlined "
2010 "because it saves address of local label in a static variable");
2011 return node;
2014 if (TYPE_P (node))
2015 *walk_subtrees = 0;
2017 return NULL_TREE;
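/* The case caught just above looks like this (hypothetical code;
   'record' and 'finish' are placeholders):

     void f (void)
     {
       static void *resume_point = &&lab;
       record (resume_point);
     lab:
       finish ();
     }

   Walking DECL_INITIAL of 'resume_point' finds a LABEL_DECL whose
   DECL_CONTEXT is f, so f is refused.  */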
2020 /* Return a subexpression that makes FNDECL ill-suited for inlining, if any. */
2021 static tree
2022 inline_forbidden_p (tree fndecl)
2024 location_t saved_loc = input_location;
2025 block_stmt_iterator bsi;
2026 basic_block bb;
2027 tree ret = NULL_TREE;
2028 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
2029 tree step;
2031 FOR_EACH_BB_FN (bb, fun)
2032 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2034 ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
2035 inline_forbidden_p_1, fndecl);
2036 if (ret)
2037 goto egress;
2040 for (step = fun->unexpanded_var_list; step; step = TREE_CHAIN (step))
2042 tree decl = TREE_VALUE (step);
2043 if (TREE_CODE (decl) == VAR_DECL
2044 && TREE_STATIC (decl)
2045 && !DECL_EXTERNAL (decl)
2046 && DECL_INITIAL (decl))
2047 ret = walk_tree_without_duplicates (&DECL_INITIAL (decl),
2048 inline_forbidden_p_2, fndecl);
2049 if (ret)
2050 goto egress;
2053 egress:
2054 input_location = saved_loc;
2055 return ret;
2058 /* Returns nonzero if FN is a function that does not have any
2059 fundamental inline blocking properties. */
2061 static bool
2062 inlinable_function_p (tree fn)
2064 bool inlinable = true;
2065 bool do_warning;
2066 tree always_inline;
2068 /* If we've already decided this function shouldn't be inlined,
2069 there's no need to check again. */
2070 if (DECL_UNINLINABLE (fn))
2071 return false;
2073 /* We only warn for functions declared `inline' by the user. */
2074 do_warning = (warn_inline
2075 && DECL_INLINE (fn)
2076 && DECL_DECLARED_INLINE_P (fn)
2077 && !DECL_IN_SYSTEM_HEADER (fn));
2079 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
2081 if (flag_really_no_inline
2082 && always_inline == NULL)
2084 if (do_warning)
2085 warning (OPT_Winline, "function %q+F can never be inlined because it "
2086 "is suppressed using -fno-inline", fn);
2087 inlinable = false;
2090 /* Don't auto-inline anything that might not be bound within
2091 this unit of translation. */
2092 else if (!DECL_DECLARED_INLINE_P (fn)
2093 && DECL_REPLACEABLE_P (fn))
2094 inlinable = false;
2096 else if (!function_attribute_inlinable_p (fn))
2098 if (do_warning)
2099 warning (OPT_Winline, "function %q+F can never be inlined because it "
2100 "uses attributes conflicting with inlining", fn);
2101 inlinable = false;
2104 /* If we don't have the function body available, we can't inline it.
2105 However, this should not be recorded since we also get here for
2106 forward declared inline functions. Therefore, return at once. */
2107 if (!DECL_SAVED_TREE (fn))
2108 return false;
2110 /* If we're not inlining at all, then we cannot inline this function. */
2111 else if (!flag_inline_trees)
2112 inlinable = false;
2114 /* Only try to inline functions if DECL_INLINE is set. This should be
2115 true for all functions declared `inline', and for all other functions
2116 as well with -finline-functions.
2118 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
2119 it's the front-end that must set DECL_INLINE in this case, because
2120 dwarf2out loses if a function that does not have DECL_INLINE set is
2121 inlined anyway. That is why we have both DECL_INLINE and
2122 DECL_DECLARED_INLINE_P. */
2123 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
2124 here should be redundant. */
2125 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
2126 inlinable = false;
2128 else if (inline_forbidden_p (fn))
2130 /* See if we should warn about uninlinable functions. Previously,
2131 some of these warnings would be issued while trying to expand
2132 the function inline, but that would cause multiple warnings
2133 about functions that would for example call alloca. But since
2134 this is a property of the function, just one warning is enough.
2135 As a bonus we can now give more details about the reason why a
2136 function is not inlinable. */
2137 if (always_inline)
2138 sorry (inline_forbidden_reason, fn);
2139 else if (do_warning)
2140 warning (OPT_Winline, inline_forbidden_reason, fn);
2142 inlinable = false;
2145 /* Squirrel away the result so that we don't have to check again. */
2146 DECL_UNINLINABLE (fn) = !inlinable;
2148 return inlinable;
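/* For instance (hypothetical code; 'probe' is a placeholder), with
   -Winline in effect,

     inline int bad (unsigned n)
     {
       char *p = alloca (n);
       return probe (p);
     }

   is diagnosed once here ("can never be inlined because it uses
   alloca"), and DECL_UNINLINABLE is set so later queries skip the
   body walk entirely.  */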
2151 /* Estimate the cost of a memory move. Use the machine-dependent
2152 word size and take a possible memcpy call into account. */
2154 static int
2155 estimate_move_cost (tree type)
2157 HOST_WIDE_INT size;
2159 size = int_size_in_bytes (type);
2161 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
2162 /* Cost of a memcpy call, 3 arguments and the call. */
2163 return 4;
2164 else
2165 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
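/* A worked example under assumed target values MOVE_MAX_PIECES == 8 and
   MOVE_RATIO == 4 (both are machine dependent): a 16-byte struct costs
   (16 + 8 - 1) / 8 == 2 units, while a 64-byte struct exceeds the
   8 * 4 == 32 byte threshold and is charged the flat memcpy cost of 4.  */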
2168 /* Arguments for estimate_num_insns_1. */
2170 struct eni_data
2172 /* Used to return the number of insns. */
2173 int count;
2175 /* Weights of various constructs. */
2176 eni_weights *weights;
2179 /* Used by estimate_num_insns. Estimates the number of instructions
2180 contributed by the given statement. */
2182 static tree
2183 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
2185 struct eni_data *d = data;
2186 tree x = *tp;
2187 unsigned cost;
2189 if (IS_TYPE_OR_DECL_P (x))
2191 *walk_subtrees = 0;
2192 return NULL;
2194 /* Assume that constants and references count as nothing. They should
2195 be outweighed by the operations on them, which we count later,
2196 and they are common targets of CSE and similar optimizations. */
2197 else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
2198 return NULL;
2200 switch (TREE_CODE (x))
2202 /* Containers have no cost. */
2203 case TREE_LIST:
2204 case TREE_VEC:
2205 case BLOCK:
2206 case COMPONENT_REF:
2207 case BIT_FIELD_REF:
2208 case INDIRECT_REF:
2209 case ALIGN_INDIRECT_REF:
2210 case MISALIGNED_INDIRECT_REF:
2211 case ARRAY_REF:
2212 case ARRAY_RANGE_REF:
2213 case OBJ_TYPE_REF:
2214 case EXC_PTR_EXPR: /* ??? */
2215 case FILTER_EXPR: /* ??? */
2216 case COMPOUND_EXPR:
2217 case BIND_EXPR:
2218 case WITH_CLEANUP_EXPR:
2219 case NOP_EXPR:
2220 case CONVERT_EXPR:
2221 case VIEW_CONVERT_EXPR:
2222 case SAVE_EXPR:
2223 case ADDR_EXPR:
2224 case COMPLEX_EXPR:
2225 case RANGE_EXPR:
2226 case CASE_LABEL_EXPR:
2227 case SSA_NAME:
2228 case CATCH_EXPR:
2229 case EH_FILTER_EXPR:
2230 case STATEMENT_LIST:
2231 case ERROR_MARK:
2232 case NON_LVALUE_EXPR:
2233 case FDESC_EXPR:
2234 case VA_ARG_EXPR:
2235 case TRY_CATCH_EXPR:
2236 case TRY_FINALLY_EXPR:
2237 case LABEL_EXPR:
2238 case GOTO_EXPR:
2239 case RETURN_EXPR:
2240 case EXIT_EXPR:
2241 case LOOP_EXPR:
2242 case PHI_NODE:
2243 case WITH_SIZE_EXPR:
2244 case OMP_CLAUSE:
2245 case OMP_RETURN:
2246 case OMP_CONTINUE:
2247 case OMP_SECTIONS_SWITCH:
2248 case OMP_ATOMIC_STORE:
2249 break;
2251 /* We do not account for constants for now. Assume that their cost is
2252 amortized by the operations that use them. We may reconsider this
2253 decision once we are able to optimize the tree before estimating its
2254 size and to break out static initializers. */
2255 case IDENTIFIER_NODE:
2256 case INTEGER_CST:
2257 case REAL_CST:
2258 case FIXED_CST:
2259 case COMPLEX_CST:
2260 case VECTOR_CST:
2261 case STRING_CST:
2262 *walk_subtrees = 0;
2263 return NULL;
2265 /* CHANGE_DYNAMIC_TYPE_EXPR explicitly expands to nothing. */
2266 case CHANGE_DYNAMIC_TYPE_EXPR:
2267 *walk_subtrees = 0;
2268 return NULL;
2270 /* Try to estimate the cost of assignments. We have three cases to
2271 deal with:
2272 1) Simple assignments to registers;
2273 2) Stores to things that must live in memory. This includes
2274 "normal" stores to scalars, but also assignments of large
2275 structures, or constructors of big arrays;
2276 3) TARGET_EXPRs.
2278 Let us look at the first two cases, assuming we have "a = b + C":
2279 <GIMPLE_MODIFY_STMT <var_decl "a">
2280 <plus_expr <var_decl "b"> <constant C>>
2281 If "a" is a GIMPLE register, the assignment to it is free on almost
2282 any target, because "a" usually ends up in a real register. Hence
2283 the only cost of this expression comes from the PLUS_EXPR, and we
2284 can ignore the GIMPLE_MODIFY_STMT.
2285 If "a" is not a GIMPLE register, the assignment to "a" will most
2286 likely be a real store, so the cost of the GIMPLE_MODIFY_STMT is the cost
2287 of moving something into "a", which we compute using the function
2288 estimate_move_cost.
2290 The third case deals with TARGET_EXPRs, for which the semantics are
2291 that a temporary is assigned, unless the TARGET_EXPR itself is being
2292 assigned to something else. In the latter case we do not need the
2293 temporary. E.g. in:
2294 <GIMPLE_MODIFY_STMT <var_decl "a"> <target_expr>>, the
2295 GIMPLE_MODIFY_STMT is free. */
2296 case INIT_EXPR:
2297 case GIMPLE_MODIFY_STMT:
2298 /* Is the right-hand side a TARGET_EXPR? */
2299 if (TREE_CODE (GENERIC_TREE_OPERAND (x, 1)) == TARGET_EXPR)
2300 break;
2301 /* ... fall through ... */
2303 case TARGET_EXPR:
2304 x = GENERIC_TREE_OPERAND (x, 0);
2305 /* Is this an assignment to a register? */
2306 if (is_gimple_reg (x))
2307 break;
2308 /* Otherwise it's a store, so fall through to compute the move cost. */
2310 case CONSTRUCTOR:
2311 d->count += estimate_move_cost (TREE_TYPE (x));
2312 break;
2314 /* Assign a cost of 1 to the usual operations.
2315 ??? We may consider mapping RTL costs to this. */
2316 case COND_EXPR:
2317 case VEC_COND_EXPR:
2319 case PLUS_EXPR:
2320 case POINTER_PLUS_EXPR:
2321 case MINUS_EXPR:
2322 case MULT_EXPR:
2324 case FIXED_CONVERT_EXPR:
2325 case FIX_TRUNC_EXPR:
2327 case NEGATE_EXPR:
2328 case FLOAT_EXPR:
2329 case MIN_EXPR:
2330 case MAX_EXPR:
2331 case ABS_EXPR:
2333 case LSHIFT_EXPR:
2334 case RSHIFT_EXPR:
2335 case LROTATE_EXPR:
2336 case RROTATE_EXPR:
2337 case VEC_LSHIFT_EXPR:
2338 case VEC_RSHIFT_EXPR:
2340 case BIT_IOR_EXPR:
2341 case BIT_XOR_EXPR:
2342 case BIT_AND_EXPR:
2343 case BIT_NOT_EXPR:
2345 case TRUTH_ANDIF_EXPR:
2346 case TRUTH_ORIF_EXPR:
2347 case TRUTH_AND_EXPR:
2348 case TRUTH_OR_EXPR:
2349 case TRUTH_XOR_EXPR:
2350 case TRUTH_NOT_EXPR:
2352 case LT_EXPR:
2353 case LE_EXPR:
2354 case GT_EXPR:
2355 case GE_EXPR:
2356 case EQ_EXPR:
2357 case NE_EXPR:
2358 case ORDERED_EXPR:
2359 case UNORDERED_EXPR:
2361 case UNLT_EXPR:
2362 case UNLE_EXPR:
2363 case UNGT_EXPR:
2364 case UNGE_EXPR:
2365 case UNEQ_EXPR:
2366 case LTGT_EXPR:
2368 case CONJ_EXPR:
2370 case PREDECREMENT_EXPR:
2371 case PREINCREMENT_EXPR:
2372 case POSTDECREMENT_EXPR:
2373 case POSTINCREMENT_EXPR:
2375 case ASM_EXPR:
2377 case REALIGN_LOAD_EXPR:
2379 case REDUC_MAX_EXPR:
2380 case REDUC_MIN_EXPR:
2381 case REDUC_PLUS_EXPR:
2382 case WIDEN_SUM_EXPR:
2383 case DOT_PROD_EXPR:
2384 case VEC_WIDEN_MULT_HI_EXPR:
2385 case VEC_WIDEN_MULT_LO_EXPR:
2386 case VEC_UNPACK_HI_EXPR:
2387 case VEC_UNPACK_LO_EXPR:
2388 case VEC_UNPACK_FLOAT_HI_EXPR:
2389 case VEC_UNPACK_FLOAT_LO_EXPR:
2390 case VEC_PACK_TRUNC_EXPR:
2391 case VEC_PACK_SAT_EXPR:
2392 case VEC_PACK_FIX_TRUNC_EXPR:
2394 case WIDEN_MULT_EXPR:
2396 case VEC_EXTRACT_EVEN_EXPR:
2397 case VEC_EXTRACT_ODD_EXPR:
2398 case VEC_INTERLEAVE_HIGH_EXPR:
2399 case VEC_INTERLEAVE_LOW_EXPR:
2401 case RESX_EXPR:
2402 d->count += 1;
2403 break;
2405 case SWITCH_EXPR:
2406 /* Take into account the cost of the switch + guess 2 conditional jumps
2407 for each case label.
2409 TODO: once the switch expansion logic is sufficiently separated, we can
2410 do a better job of estimating the cost of the switch. */
2411 d->count += TREE_VEC_LENGTH (SWITCH_LABELS (x)) * 2;
2412 break;
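/* E.g. a switch whose SWITCH_LABELS vector holds 6 entries is charged
   2 * 6 == 12 here, whether it is later expanded as a jump table or as
   a comparison tree.  */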
2414 /* A few special cases of expensive operations. This is useful
2415 for avoiding the inlining of functions that contain too many of these. */
2416 case TRUNC_DIV_EXPR:
2417 case CEIL_DIV_EXPR:
2418 case FLOOR_DIV_EXPR:
2419 case ROUND_DIV_EXPR:
2420 case EXACT_DIV_EXPR:
2421 case TRUNC_MOD_EXPR:
2422 case CEIL_MOD_EXPR:
2423 case FLOOR_MOD_EXPR:
2424 case ROUND_MOD_EXPR:
2425 case RDIV_EXPR:
2426 d->count += d->weights->div_mod_cost;
2427 break;
2428 case CALL_EXPR:
2430 tree decl = get_callee_fndecl (x);
2432 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
2433 cost = d->weights->target_builtin_call_cost;
2434 else
2435 cost = d->weights->call_cost;
2437 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
2438 switch (DECL_FUNCTION_CODE (decl))
2440 case BUILT_IN_CONSTANT_P:
2441 *walk_subtrees = 0;
2442 return NULL_TREE;
2443 case BUILT_IN_EXPECT:
2444 return NULL_TREE;
2445 /* Prefetch instruction is not expensive. */
2446 case BUILT_IN_PREFETCH:
2447 cost = 1;
2448 break;
2449 default:
2450 break;
2453 /* Our cost must be kept in sync with cgraph_estimate_size_after_inlining,
2454 which uses the function declaration to figure out the arguments. */
2455 if (!decl)
2457 tree a;
2458 call_expr_arg_iterator iter;
2459 FOR_EACH_CALL_EXPR_ARG (a, iter, x)
2460 d->count += estimate_move_cost (TREE_TYPE (a));
2462 else
2464 tree arg;
2465 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2466 d->count += estimate_move_cost (TREE_TYPE (arg));
2469 d->count += cost;
2470 break;
2473 case OMP_PARALLEL:
2474 case OMP_FOR:
2475 case OMP_SECTIONS:
2476 case OMP_SINGLE:
2477 case OMP_SECTION:
2478 case OMP_MASTER:
2479 case OMP_ORDERED:
2480 case OMP_CRITICAL:
2481 case OMP_ATOMIC:
2482 case OMP_ATOMIC_LOAD:
2483 /* OpenMP directives are generally very expensive. */
2484 d->count += d->weights->omp_cost;
2485 break;
2487 default:
2488 gcc_unreachable ();
2490 return NULL;
2493 /* Estimate number of instructions that will be created by expanding EXPR.
2494 WEIGHTS contains weights attributed to various constructs. */
2496 int
2497 estimate_num_insns (tree expr, eni_weights *weights)
2499 struct pointer_set_t *visited_nodes;
2500 basic_block bb;
2501 block_stmt_iterator bsi;
2502 struct function *my_function;
2503 struct eni_data data;
2505 data.count = 0;
2506 data.weights = weights;
2508 /* If we're given an entire function, walk the CFG. */
2509 if (TREE_CODE (expr) == FUNCTION_DECL)
2511 my_function = DECL_STRUCT_FUNCTION (expr);
2512 gcc_assert (my_function && my_function->cfg);
2513 visited_nodes = pointer_set_create ();
2514 FOR_EACH_BB_FN (bb, my_function)
2516 for (bsi = bsi_start (bb);
2517 !bsi_end_p (bsi);
2518 bsi_next (&bsi))
2520 walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
2521 &data, visited_nodes);
2524 pointer_set_destroy (visited_nodes);
2526 else
2527 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &data);
2529 return data.count;
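/* Typical usage sketch: the inlining heuristics run the same walk twice
   with different weights, e.g.

     int size = estimate_num_insns (fndecl, &eni_size_weights);
     int time = estimate_num_insns (fndecl, &eni_time_weights);

   so one traversal scheme serves both the code-growth and the
   execution-time estimates.  */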
2532 /* Initializes weights used by estimate_num_insns. */
2534 void
2535 init_inline_once (void)
2537 eni_inlining_weights.call_cost = PARAM_VALUE (PARAM_INLINE_CALL_COST);
2538 eni_inlining_weights.target_builtin_call_cost = 1;
2539 eni_inlining_weights.div_mod_cost = 10;
2540 eni_inlining_weights.omp_cost = 40;
2542 eni_size_weights.call_cost = 1;
2543 eni_size_weights.target_builtin_call_cost = 1;
2544 eni_size_weights.div_mod_cost = 1;
2545 eni_size_weights.omp_cost = 40;
2547 /* Estimating the time taken by a call is difficult, since we have no idea what the
2548 called function does. In the current uses of eni_time_weights,
2549 underestimating the cost does less harm than overestimating it, so
2550 we choose a rather small value here. */
2551 eni_time_weights.call_cost = 10;
2552 eni_time_weights.target_builtin_call_cost = 10;
2553 eni_time_weights.div_mod_cost = 10;
2554 eni_time_weights.omp_cost = 40;
2557 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
2558 static void
2559 add_lexical_block (tree current_block, tree new_block)
2561 tree *blk_p;
2563 /* Walk to the last sub-block. */
2564 for (blk_p = &BLOCK_SUBBLOCKS (current_block);
2565 *blk_p;
2566 blk_p = &BLOCK_CHAIN (*blk_p))
2568 *blk_p = new_block;
2569 BLOCK_SUPERCONTEXT (new_block) = current_block;
2572 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
2574 static bool
2575 expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
2577 copy_body_data *id;
2578 tree t;
2579 tree retvar, use_retvar;
2580 tree fn;
2581 struct pointer_map_t *st;
2582 tree return_slot;
2583 tree modify_dest;
2584 location_t saved_location;
2585 struct cgraph_edge *cg_edge;
2586 const char *reason;
2587 basic_block return_block;
2588 edge e;
2589 block_stmt_iterator bsi, stmt_bsi;
2590 bool successfully_inlined = FALSE;
2591 bool purge_dead_abnormal_edges;
2592 tree t_step;
2593 tree var;
2595 /* See what we've got. */
2596 id = (copy_body_data *) data;
2597 t = *tp;
2599 /* Set input_location here so we get the right instantiation context
2600 if we call instantiate_decl from inlinable_function_p. */
2601 saved_location = input_location;
2602 if (EXPR_HAS_LOCATION (t))
2603 input_location = EXPR_LOCATION (t);
2605 /* From here on, we're only interested in CALL_EXPRs. */
2606 if (TREE_CODE (t) != CALL_EXPR)
2607 goto egress;
2609 /* First, see if we can figure out what function is being called.
2610 If we cannot, then there is no hope of inlining the function. */
2611 fn = get_callee_fndecl (t);
2612 if (!fn)
2613 goto egress;
2615 /* Turn forward declarations into real ones. */
2616 fn = cgraph_node (fn)->decl;
2618 /* If fn is a declaration of a function in a nested scope that was
2619 globally declared inline, we don't set its DECL_INITIAL.
2620 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
2621 C++ front-end uses it for cdtors to refer to their internal
2622 declarations, which are not real functions. Fortunately those
2623 don't have trees to be saved, so we can tell by checking their
2624 DECL_SAVED_TREE. */
2625 if (! DECL_INITIAL (fn)
2626 && DECL_ABSTRACT_ORIGIN (fn)
2627 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
2628 fn = DECL_ABSTRACT_ORIGIN (fn);
2630 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
2631 Kill this check once this is fixed. */
2632 if (!id->dst_node->analyzed)
2633 goto egress;
2635 cg_edge = cgraph_edge (id->dst_node, stmt);
2637 /* Constant propagation on arguments done during previous inlining
2638 may create a new direct call. Produce an edge for it. */
2639 if (!cg_edge)
2641 struct cgraph_node *dest = cgraph_node (fn);
2643 /* We have a missing edge in the callgraph. This can happen
2644 when previous inlining turned an indirect call into a direct call
2645 by constant propagating arguments. In all other cases we hit a bug
2646 (incorrect node sharing is the most common reason for missing edges). */
2647 gcc_assert (dest->needed || !flag_unit_at_a_time);
2648 cgraph_create_edge (id->dst_node, dest, stmt,
2649 bb->count, CGRAPH_FREQ_BASE,
2650 bb->loop_depth)->inline_failed
2651 = N_("originally indirect function call not considered for inlining");
2652 if (dump_file)
2654 fprintf (dump_file, "Created new direct edge to %s",
2655 cgraph_node_name (dest));
2657 goto egress;
2660 /* Don't try to inline functions that are not well-suited to
2661 inlining. */
2662 if (!cgraph_inline_p (cg_edge, &reason))
2664 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
2665 /* Avoid warnings during early inline pass. */
2666 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2668 sorry ("inlining failed in call to %q+F: %s", fn, reason);
2669 sorry ("called from here");
2671 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
2672 && !DECL_IN_SYSTEM_HEADER (fn)
2673 && strlen (reason)
2674 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
2675 /* Avoid warnings during early inline pass. */
2676 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2678 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
2679 fn, reason);
2680 warning (OPT_Winline, "called from here");
2682 goto egress;
2684 fn = cg_edge->callee->decl;
2686 #ifdef ENABLE_CHECKING
2687 if (cg_edge->callee->decl != id->dst_node->decl)
2688 verify_cgraph_node (cg_edge->callee);
2689 #endif
2691 /* We will be inlining this callee. */
2692 id->eh_region = lookup_stmt_eh_region (stmt);
2694 /* Split the block holding the CALL_EXPR. */
2695 e = split_block (bb, stmt);
2696 bb = e->src;
2697 return_block = e->dest;
2698 remove_edge (e);
2700 /* split_block splits after the statement; work around this by
2701 moving the call into the second block manually. Not pretty,
2702 but seems easier than doing the CFG manipulation by hand
2703 when the CALL_EXPR is in the last statement of BB. */
2704 stmt_bsi = bsi_last (bb);
2705 bsi_remove (&stmt_bsi, false);
2707 /* If the CALL_EXPR was in the last statement of BB, it may have
2708 been the source of abnormal edges. In this case, schedule
2709 the removal of dead abnormal edges. */
2710 bsi = bsi_start (return_block);
2711 if (bsi_end_p (bsi))
2713 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2714 purge_dead_abnormal_edges = true;
2716 else
2718 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
2719 purge_dead_abnormal_edges = false;
2722 stmt_bsi = bsi_start (return_block);
2724 /* Build a block containing code to initialize the arguments, the
2725 actual inline expansion of the body, and a label for the return
2726 statements within the function to jump to. The type of the
2727 statement expression is the return type of the function call. */
2728 id->block = make_node (BLOCK);
2729 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
2730 BLOCK_SOURCE_LOCATION (id->block) = input_location;
2731 add_lexical_block (TREE_BLOCK (stmt), id->block);
2733 /* Local declarations will be replaced by their equivalents in this
2734 map. */
2735 st = id->decl_map;
2736 id->decl_map = pointer_map_create ();
2738 /* Record the function we are about to inline. */
2739 id->src_fn = fn;
2740 id->src_node = cg_edge->callee;
2741 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
2742 id->call_expr = t;
2744 gcc_assert (!id->src_cfun->after_inlining);
2746 id->entry_bb = bb;
2747 initialize_inlined_parameters (id, t, fn, bb);
2749 if (DECL_INITIAL (fn))
2750 add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
2752 /* Return statements in the function body will be replaced by jumps
2753 to the return block. */
2755 gcc_assert (DECL_INITIAL (fn));
2756 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
2758 /* Find the lhs to which the result of this call is assigned. */
2759 return_slot = NULL;
2760 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
2762 modify_dest = GIMPLE_STMT_OPERAND (stmt, 0);
2764 /* The function which we are inlining might not return a value,
2765 in which case we should issue a warning that the function
2766 does not return a value. In that case the optimizers will
2767 see that the variable to which the value is assigned was not
2768 initialized; we do not want to issue an additional warning
2769 about that uninitialized variable. */
2770 if (DECL_P (modify_dest))
2771 TREE_NO_WARNING (modify_dest) = 1;
2772 if (CALL_EXPR_RETURN_SLOT_OPT (t))
2774 return_slot = modify_dest;
2775 modify_dest = NULL;
2778 else
2779 modify_dest = NULL;
2781 /* If we are inlining a call to the C++ operator new, we don't want
2782 to use type based alias analysis on the return value. Otherwise
2783 we may get confused if the compiler sees that the inlined new
2784 function returns a pointer which was just deleted. See bug
2785 33407. */
2786 if (DECL_IS_OPERATOR_NEW (fn))
2788 return_slot = NULL;
2789 modify_dest = NULL;
2792 /* Declare the return variable for the function. */
2793 retvar = declare_return_variable (id, return_slot,
2794 modify_dest, &use_retvar);
2796 if (DECL_IS_OPERATOR_NEW (fn))
2798 gcc_assert (TREE_CODE (retvar) == VAR_DECL
2799 && POINTER_TYPE_P (TREE_TYPE (retvar)));
2800 DECL_NO_TBAA_P (retvar) = 1;
2803 /* This is it. Duplicate the callee body. Assume callee is
2804 pre-gimplified. Note that we must not alter the caller
2805 function in any way before this point, as this CALL_EXPR may be
2806 a self-referential call; if we're calling ourselves, we need to
2807 duplicate our body before altering anything. */
2808 copy_body (id, bb->count, bb->frequency, bb, return_block);
2810 /* Add local vars in this inlined callee to caller. */
2811 t_step = id->src_cfun->unexpanded_var_list;
2812 for (; t_step; t_step = TREE_CHAIN (t_step))
2814 var = TREE_VALUE (t_step);
2815 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2816 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
2817 cfun->unexpanded_var_list);
2818 else
2819 cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
2820 cfun->unexpanded_var_list);
2823 /* Clean up. */
2824 pointer_map_destroy (id->decl_map);
2825 id->decl_map = st;
2827 /* If the inlined function returns a result that we care about,
2828 clobber the CALL_EXPR with a reference to the return variable. */
2829 if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
2831 *tp = use_retvar;
2832 if (gimple_in_ssa_p (cfun))
2834 update_stmt (stmt);
2835 mark_symbols_for_renaming (stmt);
2837 maybe_clean_or_replace_eh_stmt (stmt, stmt);
2839 else
2840 /* We're modifying a TSI owned by gimple_expand_calls_inline();
2841 tsi_delink() will leave the iterator in a sane state. */
2843 /* Handle the case of inlining a function that lacks a return statement,
2844 so its return value becomes undefined. */
2845 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
2846 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME)
2848 tree name = GIMPLE_STMT_OPERAND (stmt, 0);
2849 tree var = SSA_NAME_VAR (GIMPLE_STMT_OPERAND (stmt, 0));
2850 tree def = gimple_default_def (cfun, var);
2852 /* If the variable already has a default (undefined) definition,
2853 make this name undefined via a move from it. */
2854 if (def)
2856 GIMPLE_STMT_OPERAND (stmt, 1) = def;
2857 update_stmt (stmt);
2859 /* Otherwise make this variable undefined. */
2860 else
2862 bsi_remove (&stmt_bsi, true);
2863 set_default_def (var, name);
2864 SSA_NAME_DEF_STMT (name) = build_empty_stmt ();
2867 else
2868 bsi_remove (&stmt_bsi, true);
2871 if (purge_dead_abnormal_edges)
2872 tree_purge_dead_abnormal_call_edges (return_block);
2874 /* If the value of the new expression is ignored, that's OK. We
2875 don't warn about this for CALL_EXPRs, so we shouldn't warn about
2876 the equivalent inlined version either. */
2877 TREE_USED (*tp) = 1;
2879 /* Output the inlining info for this abstract function, since it has been
2880 inlined. If we don't do this now, we can lose the information about the
2881 variables in the function when the blocks get blown away as soon as we
2882 remove the cgraph node. */
2883 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
2885 /* Update callgraph if needed. */
2886 cgraph_remove_node (cg_edge->callee);
2888 id->block = NULL_TREE;
2889 successfully_inlined = TRUE;
2891 egress:
2892 input_location = saved_location;
2893 return successfully_inlined;
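/* Shape of the transformation performed above (sketch):

     before:   bb:  ...; x = f (a); ...

     after:    bb:            ...; [f's parameters initialized from 'a']
               inlined body:  [copy of f; RETURN_EXPRs become assignments
                               to the return variable plus edges to
                               return_block]
               return_block:  x = [return variable]; ...
  */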
2896 /* Expand call statements reachable from STMT_P.
2897 We can only have CALL_EXPRs as the "toplevel" tree code or nested
2898 in a GIMPLE_MODIFY_STMT. See tree-gimple.c:get_call_expr_in().
2899 Unfortunately, we cannot use that function here because we need a pointer
2900 to the CALL_EXPR, not the tree itself. */
2902 static bool
2903 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
2905 block_stmt_iterator bsi;
2907 /* Register specific tree functions. */
2908 tree_register_cfg_hooks ();
2909 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2911 tree *expr_p = bsi_stmt_ptr (bsi);
2912 tree stmt = *expr_p;
2914 if (TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT)
2915 expr_p = &GIMPLE_STMT_OPERAND (*expr_p, 1);
2916 if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
2917 expr_p = &TREE_OPERAND (*expr_p, 0);
2918 if (TREE_CODE (*expr_p) == CALL_EXPR)
2919 if (expand_call_inline (bb, stmt, expr_p, id))
2920 return true;
2922 return false;
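/* The statement shapes unwrapped above, by example:

     foo (a);                          -- CALL_EXPR at the top level
     x = foo (a);                      -- nested in a GIMPLE_MODIFY_STMT
     x = WITH_SIZE_EXPR <foo (a), n>;  -- additionally wrapped in a
                                          WITH_SIZE_EXPR
  */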
2925 /* Walk all basic blocks created after FIRST and try to fold every statement
2926 in the STATEMENTS pointer set. */
2927 static void
2928 fold_marked_statements (int first, struct pointer_set_t *statements)
2930 for (; first < n_basic_blocks; first++)
2931 if (BASIC_BLOCK (first))
2933 block_stmt_iterator bsi;
2934 for (bsi = bsi_start (BASIC_BLOCK (first));
2935 !bsi_end_p (bsi); bsi_next (&bsi))
2936 if (pointer_set_contains (statements, bsi_stmt (bsi)))
2938 tree old_stmt = bsi_stmt (bsi);
2939 if (fold_stmt (bsi_stmt_ptr (bsi)))
2941 update_stmt (bsi_stmt (bsi));
2942 if (maybe_clean_or_replace_eh_stmt (old_stmt, bsi_stmt (bsi)))
2943 tree_purge_dead_eh_edges (BASIC_BLOCK (first));
2949 /* Return true if BB has at least one abnormal outgoing edge. */
2951 static inline bool
2952 has_abnormal_outgoing_edge_p (basic_block bb)
2954 edge e;
2955 edge_iterator ei;
2957 FOR_EACH_EDGE (e, ei, bb->succs)
2958 if (e->flags & EDGE_ABNORMAL)
2959 return true;
2961 return false;
2964 /* Expand calls to inline functions in the body of FN. */
2966 unsigned int
2967 optimize_inline_calls (tree fn)
2969 copy_body_data id;
2970 tree prev_fn;
2971 basic_block bb;
2972 int last = n_basic_blocks;
2973 /* There is no point in performing inlining if errors have already
2974 occurred -- and we might crash if we try to inline invalid
2975 code. */
2976 if (errorcount || sorrycount)
2977 return 0;
2979 /* Clear out ID. */
2980 memset (&id, 0, sizeof (id));
2982 id.src_node = id.dst_node = cgraph_node (fn);
2983 id.dst_fn = fn;
2984 /* Or any functions that aren't finished yet. */
2985 prev_fn = NULL_TREE;
2986 if (current_function_decl)
2988 id.dst_fn = current_function_decl;
2989 prev_fn = current_function_decl;
2992 id.copy_decl = copy_decl_maybe_to_var;
2993 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2994 id.transform_new_cfg = false;
2995 id.transform_return_to_modify = true;
2996 id.transform_lang_insert_block = false;
2997 id.statements_to_fold = pointer_set_create ();
2999 push_gimplify_context ();
3001 /* We make no attempts to keep dominance info up-to-date. */
3002 free_dominance_info (CDI_DOMINATORS);
3003 free_dominance_info (CDI_POST_DOMINATORS);
3005 /* Reach the trees by walking over the CFG, and note the
3006 enclosing basic-blocks in the call edges. */
3007 /* We walk the blocks going forward, because inlined function bodies
3008 will split id->current_basic_block, and the new blocks will
3009 follow it; we'll trudge through them, processing their CALL_EXPRs
3010 along the way. */
3011 FOR_EACH_BB (bb)
3012 gimple_expand_calls_inline (bb, &id);
3014 pop_gimplify_context (NULL);
3016 #ifdef ENABLE_CHECKING
3018 struct cgraph_edge *e;
3020 verify_cgraph_node (id.dst_node);
3022 /* Double check that we inlined everything we are supposed to inline. */
3023 for (e = id.dst_node->callees; e; e = e->next_callee)
3024 gcc_assert (e->inline_failed);
3026 #endif
3028 /* Fold the statements before compacting/renumbering the basic blocks. */
3029 fold_marked_statements (last, id.statements_to_fold);
3030 pointer_set_destroy (id.statements_to_fold);
3032 /* Renumber the (code) basic_blocks consecutively. */
3033 compact_blocks ();
3034 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3035 number_blocks (fn);
3037 /* We are not going to maintain the cgraph edges up to date.
3038 Kill them so they won't confuse us. */
3039 cgraph_node_remove_callees (id.dst_node);
3041 fold_cond_expr_cond ();
3042 /* It would be nice to check SSA/CFG/statement consistency here, but it is
3043 not possible yet: the IPA passes might mark various functions as
3044 non-throwing, and they do not proactively update local EH info. This is
3045 done later in the fixup_cfg pass, which also executes the verification. */
3046 return (TODO_update_ssa | TODO_cleanup_cfg
3047 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
3048 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
3051 /* FN is a function that has a complete body, and CLONE is a function whose
3052 body is to be set to a copy of FN, mapping argument declarations according
3053 to the ARG_MAP splay_tree. */
3055 void
3056 clone_body (tree clone, tree fn, void *arg_map)
3058 copy_body_data id;
3060 /* Clone the body, as if we were making an inline call. But, remap the
3061 parameters in the callee to the parameters of the caller. */
3062 memset (&id, 0, sizeof (id));
3063 id.src_fn = fn;
3064 id.dst_fn = clone;
3065 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
3066 id.decl_map = (struct pointer_map_t *)arg_map;
3068 id.copy_decl = copy_decl_no_change;
3069 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3070 id.transform_new_cfg = true;
3071 id.transform_return_to_modify = false;
3072 id.transform_lang_insert_block = true;
3074 /* We're not inside any EH region. */
3075 id.eh_region = -1;
3077 /* Actually copy the body. */
3078 append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
3081 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
3083 tree
3084 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3086 enum tree_code code = TREE_CODE (*tp);
3087 enum tree_code_class cl = TREE_CODE_CLASS (code);
3089 /* We make copies of most nodes. */
3090 if (IS_EXPR_CODE_CLASS (cl)
3091 || IS_GIMPLE_STMT_CODE_CLASS (cl)
3092 || code == TREE_LIST
3093 || code == TREE_VEC
3094 || code == TYPE_DECL
3095 || code == OMP_CLAUSE)
3097 /* Because the chain gets clobbered when we make a copy, we save it
3098 here. */
3099 tree chain = NULL_TREE, new;
3101 if (!GIMPLE_TUPLE_P (*tp))
3102 chain = TREE_CHAIN (*tp);
3104 /* Copy the node. */
3105 new = copy_node (*tp);
3107 /* Propagate mudflap marked-ness. */
3108 if (flag_mudflap && mf_marked_p (*tp))
3109 mf_mark (new);
3111 *tp = new;
3113 /* Now, restore the chain, if appropriate. That will cause
3114 walk_tree to walk into the chain as well. */
3115 if (code == PARM_DECL
3116 || code == TREE_LIST
3117 || code == OMP_CLAUSE)
3118 TREE_CHAIN (*tp) = chain;
3120 /* For now, we don't update BLOCKs when we make copies. So, we
3121 have to nullify all BIND_EXPRs. */
3122 if (TREE_CODE (*tp) == BIND_EXPR)
3123 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
3125 else if (code == CONSTRUCTOR)
3127 /* CONSTRUCTOR nodes need special handling because
3128 we need to duplicate the vector of elements. */
3129 tree new;
3131 new = copy_node (*tp);
3133 /* Propagate mudflap marked-ness. */
3134 if (flag_mudflap && mf_marked_p (*tp))
3135 mf_mark (new);
3137 CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
3138 CONSTRUCTOR_ELTS (*tp));
3139 *tp = new;
3141 else if (TREE_CODE_CLASS (code) == tcc_type)
3142 *walk_subtrees = 0;
3143 else if (TREE_CODE_CLASS (code) == tcc_declaration)
3144 *walk_subtrees = 0;
3145 else if (TREE_CODE_CLASS (code) == tcc_constant)
3146 *walk_subtrees = 0;
3147 else
3148 gcc_assert (code != STATEMENT_LIST);
3149 return NULL_TREE;
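/* Illustration: after

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);

   the expression nodes of, say, 'a + b * 4' are duplicated, while the
   VAR_DECLs 'a' and 'b' and the INTEGER_CST 4 remain shared, because
   declarations and constants cut off the walk above.  */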
3152 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
3153 information indicating to what new SAVE_EXPR this one should be mapped,
3154 use that one. Otherwise, create a new node and enter it in ST so that
3155 later copies of the same SAVE_EXPR reuse it. */
3157 static void
3158 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
3160 struct pointer_map_t *st = (struct pointer_map_t *) st_;
3161 tree *n;
3162 tree t;
3164 /* See if we already encountered this SAVE_EXPR. */
3165 n = (tree *) pointer_map_contains (st, *tp);
3167 /* If we didn't already remap this SAVE_EXPR, do so now. */
3168 if (!n)
3170 t = copy_node (*tp);
3172 /* Remember this SAVE_EXPR. */
3173 *pointer_map_insert (st, *tp) = t;
3174 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
3175 *pointer_map_insert (st, t) = t;
3177 else
3179 /* We've already walked into this SAVE_EXPR; don't do it again. */
3180 *walk_subtrees = 0;
3181 t = *n;
3184 /* Replace this SAVE_EXPR with the copy. */
3185 *tp = t;
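/* Note by example: if the body being copied refers twice to the same
   'SAVE_EXPR <n * 2>', both references must end up pointing at one new
   SAVE_EXPR in the copy, preserving evaluate-once semantics; the extra
   t -> t entry keeps a second walk from copying the copy.  */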
3188 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
3189 copies the declaration and enters it in the decl map in DATA (which is
3190 really a `copy_body_data *'). */
3192 static tree
3193 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
3194 void *data)
3196 copy_body_data *id = (copy_body_data *) data;
3198 /* Don't walk into types. */
3199 if (TYPE_P (*tp))
3200 *walk_subtrees = 0;
3202 else if (TREE_CODE (*tp) == LABEL_EXPR)
3204 tree decl = TREE_OPERAND (*tp, 0);
3206 /* Copy the decl and remember the copy. */
3207 insert_decl_map (id, decl, id->copy_decl (decl, id));
3210 return NULL_TREE;
3213 /* Perform any modifications to EXPR required when it is unsaved. Does
3214 not recurse into EXPR's subtrees. */
3216 static void
3217 unsave_expr_1 (tree expr)
3219 switch (TREE_CODE (expr))
3221 case TARGET_EXPR:
3222 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
3223 It's OK for this to happen if it was part of a subtree that
3224 isn't immediately expanded, such as operand 2 of another
3225 TARGET_EXPR. */
3226 if (TREE_OPERAND (expr, 1))
3227 break;
3229 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
3230 TREE_OPERAND (expr, 3) = NULL_TREE;
3231 break;
3233 default:
3234 break;
3238 /* Called via walk_tree when an expression is unsaved. Using the
3239 decl map held in DATA (which is really a `copy_body_data *'),
3240 remaps all local declarations to appropriate replacements. */
3242 static tree
3243 unsave_r (tree *tp, int *walk_subtrees, void *data)
3245 copy_body_data *id = (copy_body_data *) data;
3246 struct pointer_map_t *st = id->decl_map;
3247 tree *n;
3249 /* Only a local declaration (variable or label). */
3250 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
3251 || TREE_CODE (*tp) == LABEL_DECL)
3253 /* Lookup the declaration. */
3254 n = (tree *) pointer_map_contains (st, *tp);
3256 /* If it's there, remap it. */
3257 if (n)
3258 *tp = *n;
3261 else if (TREE_CODE (*tp) == STATEMENT_LIST)
3262 copy_statement_list (tp);
3263 else if (TREE_CODE (*tp) == BIND_EXPR)
3264 copy_bind_expr (tp, walk_subtrees, id);
3265 else if (TREE_CODE (*tp) == SAVE_EXPR)
3266 remap_save_expr (tp, st, walk_subtrees);
3267 else
3269 copy_tree_r (tp, walk_subtrees, NULL);
3271 /* Do whatever unsaving is required. */
3272 unsave_expr_1 (*tp);
3275 /* Keep iterating. */
3276 return NULL_TREE;
3279 /* Copies everything in EXPR and replaces variables, labels
3280 and SAVE_EXPRs local to EXPR. */
3282 tree
3283 unsave_expr_now (tree expr)
3285 copy_body_data id;
3287 /* There's nothing to do for NULL_TREE. */
3288 if (expr == 0)
3289 return expr;
3291 /* Set up ID. */
3292 memset (&id, 0, sizeof (id));
3293 id.src_fn = current_function_decl;
3294 id.dst_fn = current_function_decl;
3295 id.decl_map = pointer_map_create ();
3297 id.copy_decl = copy_decl_no_change;
3298 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3299 id.transform_new_cfg = false;
3300 id.transform_return_to_modify = false;
3301 id.transform_lang_insert_block = false;
3303 /* Walk the tree once to find local labels. */
3304 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
3306 /* Walk the tree again, copying, remapping, and unsaving. */
3307 walk_tree (&expr, unsave_r, &id, NULL);
3309 /* Clean up. */
3310 pointer_map_destroy (id.decl_map);
3312 return expr;
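/* Minimal usage sketch:

     tree copy = unsave_expr_now (expr);

   'copy' shares no local variables, labels or SAVE_EXPRs with 'expr',
   so the two trees can afterwards be expanded independently.  */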
3315 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
3317 static tree
3318 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
3320 if (*tp == data)
3321 return (tree) data;
3322 else
3323 return NULL;
3326 bool
3327 debug_find_tree (tree top, tree search)
3329 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
3333 /* Declare the variables created by the inliner. Add all the variables in
3334 VARS to BLOCK. */
3336 static void
3337 declare_inline_vars (tree block, tree vars)
3339 tree t;
3340 for (t = vars; t; t = TREE_CHAIN (t))
3342 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
3343 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
3344 cfun->unexpanded_var_list =
3345 tree_cons (NULL_TREE, t,
3346 cfun->unexpanded_var_list);
3349 if (block)
3350 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
3354 /* Finish up COPY, a copy of DECL. The DECL originally lived in ID->src_fn,
3355 but the copy will live in ID->dst_fn; fix up its debug info, RTL and
3356 DECL_CONTEXT accordingly. */
3358 static tree
3359 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
3361 /* Don't generate debug information for the copy if we wouldn't have
3362 generated it for the original either. */
3363 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
3364 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
3366 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
3367 declaration inspired this copy. */
3368 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
3370 /* The new variable/label has no RTL, yet. */
3371 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
3372 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
3373 SET_DECL_RTL (copy, NULL_RTX);
3375 /* These args would always appear unused, if not for this. */
3376 TREE_USED (copy) = 1;
3378 /* Set the context for the new declaration. */
3379 if (!DECL_CONTEXT (decl))
3380 /* Globals stay global. */
3382 else if (DECL_CONTEXT (decl) != id->src_fn)
3383 /* Things that weren't in the scope of the function we're inlining
3384 from aren't in the scope we're inlining to, either. */
3386 else if (TREE_STATIC (decl))
3387 /* Function-scoped static variables should stay in the original
3388 function. */
3390 else
3391 /* Ordinary automatic local variables are now in the scope of the
3392 new function. */
3393 DECL_CONTEXT (copy) = id->dst_fn;
3395 return copy;
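/* The context rules above, by example, when inlining or copying some f
   into some g (hypothetical declarations inside f):

     int global_counter;        -- no DECL_CONTEXT: stays global
     static int calls_made;     -- function-scoped static: stays with f
     int tmp;                   -- ordinary auto: DECL_CONTEXT becomes g
  */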
3398 static tree
3399 copy_decl_to_var (tree decl, copy_body_data *id)
3401 tree copy, type;
3403 gcc_assert (TREE_CODE (decl) == PARM_DECL
3404 || TREE_CODE (decl) == RESULT_DECL);
3406 type = TREE_TYPE (decl);
3408 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
3409 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
3410 TREE_READONLY (copy) = TREE_READONLY (decl);
3411 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
3412 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
3413 DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl);
3415 return copy_decl_for_dup_finish (id, decl, copy);
3418 /* Like copy_decl_to_var, but create a return slot object instead of a
3419 pointer variable for return by invisible reference. */
3421 static tree
3422 copy_result_decl_to_var (tree decl, copy_body_data *id)
3424 tree copy, type;
3426 gcc_assert (TREE_CODE (decl) == PARM_DECL
3427 || TREE_CODE (decl) == RESULT_DECL);
3429 type = TREE_TYPE (decl);
3430 if (DECL_BY_REFERENCE (decl))
3431 type = TREE_TYPE (type);
3433 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
3434 TREE_READONLY (copy) = TREE_READONLY (decl);
3435 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
3436 if (!DECL_BY_REFERENCE (decl))
3438 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
3439 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
3440 DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl);
3443 return copy_decl_for_dup_finish (id, decl, copy);
3447 static tree
3448 copy_decl_no_change (tree decl, copy_body_data *id)
3450 tree copy;
3452 copy = copy_node (decl);
3454 /* The COPY is not abstract; it will be generated in DST_FN. */
3455 DECL_ABSTRACT (copy) = 0;
3456 lang_hooks.dup_lang_specific_decl (copy);
3458 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
3459 been taken; it's for internal bookkeeping in expand_goto_internal. */
3460 if (TREE_CODE (copy) == LABEL_DECL)
3462 TREE_ADDRESSABLE (copy) = 0;
3463 LABEL_DECL_UID (copy) = -1;
3466 return copy_decl_for_dup_finish (id, decl, copy);
3469 static tree
3470 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
3472 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
3473 return copy_decl_to_var (decl, id);
3474 else
3475 return copy_decl_no_change (decl, id);
3478 /* Return a copy of the function's argument tree. */
3479 static tree
3480 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id)
3482 tree *arg_copy, *parg;
3484 arg_copy = &orig_parm;
3485 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
3487 tree new = remap_decl (*parg, id);
3488 lang_hooks.dup_lang_specific_decl (new);
3489 TREE_CHAIN (new) = TREE_CHAIN (*parg);
3490 *parg = new;
3492 return orig_parm;
3495 /* Return a copy of the function's static chain. */
3496 static tree
3497 copy_static_chain (tree static_chain, copy_body_data * id)
3499 tree *chain_copy, *pvar;
3501 chain_copy = &static_chain;
3502 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
3504 tree new = remap_decl (*pvar, id);
3505 lang_hooks.dup_lang_specific_decl (new);
3506 TREE_CHAIN (new) = TREE_CHAIN (*pvar);
3507 *pvar = new;
3509 return static_chain;
3512 /* Return true if the function is allowed to be versioned.
3513 This is a guard for the versioning functionality. */
3514 bool
3515 tree_versionable_function_p (tree fndecl)
3517 if (fndecl == NULL_TREE)
3518 return false;
3519 /* ??? There are cases where a function is
3520 uninlinable but can be versioned. */
3521 if (!tree_inlinable_function_p (fndecl))
3522 return false;
3524 return true;
3527 /* Create a copy of a function's tree.
3528 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
3529 of the original function and the new copied function
3530 respectively. In case we want to replace a DECL
3531 tree with another tree while duplicating the function's
3532 body, TREE_MAP represents the mapping between these
3533 trees. If UPDATE_CLONES is set, the call_stmt fields
3534 of edges of clones of the function will be updated. */
3535 void
3536 tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
3537 bool update_clones)
3539 struct cgraph_node *old_version_node;
3540 struct cgraph_node *new_version_node;
3541 copy_body_data id;
3542 tree p;
3543 unsigned i;
3544 struct ipa_replace_map *replace_info;
3545 basic_block old_entry_block;
3546 tree t_step;
3547 tree old_current_function_decl = current_function_decl;
3549 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
3550 && TREE_CODE (new_decl) == FUNCTION_DECL);
3551 DECL_POSSIBLY_INLINED (old_decl) = 1;
3553 old_version_node = cgraph_node (old_decl);
3554 new_version_node = cgraph_node (new_decl);
3556 DECL_ARTIFICIAL (new_decl) = 1;
3557 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
3559 /* Prepare the data structures for the tree copy. */
3560 memset (&id, 0, sizeof (id));
3562 /* Generate a new name for the new version. */
3563 if (!update_clones)
3565 DECL_NAME (new_decl) = create_tmp_var_name (NULL);
3566 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
3567 SET_DECL_RTL (new_decl, NULL_RTX);
3568 id.statements_to_fold = pointer_set_create ();
3571 id.decl_map = pointer_map_create ();
3572 id.src_fn = old_decl;
3573 id.dst_fn = new_decl;
3574 id.src_node = old_version_node;
3575 id.dst_node = new_version_node;
3576 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
3578 id.copy_decl = copy_decl_no_change;
3579 id.transform_call_graph_edges
3580 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
3581 id.transform_new_cfg = true;
3582 id.transform_return_to_modify = false;
3583 id.transform_lang_insert_block = false;
3585 current_function_decl = new_decl;
3586 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
3587 (DECL_STRUCT_FUNCTION (old_decl));
3588 initialize_cfun (new_decl, old_decl,
3589 old_entry_block->count,
3590 old_entry_block->frequency);
3591 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
3593 /* Copy the function's static chain. */
3594 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
3595 if (p)
3596 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
3597 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
3598 &id);
3599 /* Copy the function's arguments. */
3600 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
3601 DECL_ARGUMENTS (new_decl) =
3602 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);
3604 /* If there's a tree_map, prepare for substitution. */
3605 if (tree_map)
3606 for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
3608 replace_info = VARRAY_GENERIC_PTR (tree_map, i);
3609 if (replace_info->replace_p)
3610 insert_decl_map (&id, replace_info->old_tree,
3611 replace_info->new_tree);
3614 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
3616 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3617 number_blocks (id.dst_fn);
3619 if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
3620 /* Add local vars. */
3621 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
3622 t_step; t_step = TREE_CHAIN (t_step))
3624 tree var = TREE_VALUE (t_step);
3625 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3626 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
3627 cfun->unexpanded_var_list);
3628 else
3629 cfun->unexpanded_var_list =
3630 tree_cons (NULL_TREE, remap_decl (var, &id),
3631 cfun->unexpanded_var_list);
3634 /* Copy the function's body. */
3635 copy_body (&id, old_entry_block->count, old_entry_block->frequency, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
3637 if (DECL_RESULT (old_decl) != NULL_TREE)
3639 tree *res_decl = &DECL_RESULT (old_decl);
3640 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
3641 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
3644 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3645 number_blocks (new_decl);
3647 /* Clean up. */
3648 pointer_map_destroy (id.decl_map);
3649 if (!update_clones)
3651 fold_marked_statements (0, id.statements_to_fold);
3652 pointer_set_destroy (id.statements_to_fold);
3653 fold_cond_expr_cond ();
3655 if (gimple_in_ssa_p (cfun))
3657 free_dominance_info (CDI_DOMINATORS);
3658 free_dominance_info (CDI_POST_DOMINATORS);
3659 if (!update_clones)
3660 delete_unreachable_blocks ();
3661 update_ssa (TODO_update_ssa);
3662 if (!update_clones)
3664 fold_cond_expr_cond ();
3665 if (need_ssa_update_p ())
3666 update_ssa (TODO_update_ssa);
3669 free_dominance_info (CDI_DOMINATORS);
3670 free_dominance_info (CDI_POST_DOMINATORS);
3671 pop_cfun ();
3672 current_function_decl = old_current_function_decl;
3673 gcc_assert (!current_function_decl
3674 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
3675 return;
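/* Sketch of a TREE_MAP entry (hypothetical values), of the kind IPA
   constant propagation uses to pin a parameter in the new version:

     struct ipa_replace_map *map = XCNEW (struct ipa_replace_map);
     map->old_tree = parm;                                 -- decl in OLD_DECL
     map->new_tree = build_int_cst (integer_type_node, 42);
     map->replace_p = true;

   The insert_decl_map call above then rewrites every use of 'parm' in
   the copied body to the constant 42.  */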
3678 /* Duplicate a type, fields and all. */
3680 tree
3681 build_duplicate_type (tree type)
3683 struct copy_body_data id;
3685 memset (&id, 0, sizeof (id));
3686 id.src_fn = current_function_decl;
3687 id.dst_fn = current_function_decl;
3688 id.src_cfun = cfun;
3689 id.decl_map = pointer_map_create ();
3690 id.copy_decl = copy_decl_no_change;
3692 type = remap_type_1 (type, &id);
3694 pointer_map_destroy (id.decl_map);
3696 return type;