/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "hashtab.h"
36 #include "langhooks.h"
37 #include "basic-block.h"
38 #include "tree-iterator.h"
39 #include "cgraph.h"
40 #include "intl.h"
41 #include "tree-mudflap.h"
42 #include "tree-flow.h"
43 #include "function.h"
44 #include "ggc.h"
45 #include "tree-flow.h"
46 #include "diagnostic.h"
47 #include "except.h"
48 #include "debug.h"
49 #include "pointer-set.h"
50 #include "ipa-prop.h"
51 #include "value-prof.h"
52 #include "tree-pass.h"
53 #include "target.h"
54 #include "integrate.h"
/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"
/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inlined into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
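
/* Editor's illustration (a sketch, not part of the original sources):
   inlining

       int square (int x) { return x * x; }
       ...
       y = square (v);

   remaps the PARM_DECL `x' to a fresh local VAR_DECL and turns the
   RETURN_EXPR into an assignment to a returned-value variable, roughly:

       x.1 = v;
       retval.2 = x.1 * x.1;
       y = retval.2;  */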
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */

/* Weights that estimate_num_insns uses for heuristics in inlining.  */

eni_weights eni_inlining_weights;

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}
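
/* Editor's illustration (a sketch): after

       insert_decl_map (id, old_parm, new_var);

   the map contains both OLD_PARM -> NEW_VAR and the identity entry
   NEW_VAR -> NEW_VAR, so remapping NEW_VAR itself later simply
   returns NEW_VAR instead of duplicating it a second time.  */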

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return name;
    }

  /* Do not set DEF_STMT yet as statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by the variable during
     inlining: this saves us from the need to introduce a PHI node when
     the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining a function having an uninitialized variable, we
             might extend its lifetime (the variable might get reused).
             This causes an ICE if we end up extending the lifetime of an
             SSA name across an abnormal edge, and it also increases
             register pressure.

             We simply initialize all uninitialized vars with 0, except
             when we are inlining into the very first BB.  We can avoid
             this for all BBs that are not inside strongly connected
             regions of the CFG, but this is expensive to test.  */
          if (id->entry_bb
              && is_gimple_reg (SSA_NAME_VAR (name))
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;

              init_stmt = gimple_build_assign (new_tree,
                                               fold_convert (TREE_TYPE (new_tree),
                                                             integer_zero_node));
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
                  == name)
                set_default_def (SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
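
/* Editor's illustration (a sketch): if the inlined body uses a local
   that is never initialized, say

       int x;
       use (x_1);    <-- x_1's def statement is a GIMPLE_NOP

   then, unless we are inlining into the very first BB, the copy gets
   an explicit zero initializer appended to ID->entry_bb:

       x.2_5 = 0;
       use (x.2_5);

   which avoids extending the lifetime of an undefined SSA name across
   abnormal edges.  */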

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      if (cfun && gimple_in_ssa_p (cfun)
          && (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
        {
          get_var_ann (t);
          add_referenced_var (t);
        }
      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
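
/* Editor's illustration (a sketch): only variably modified types need
   a real remap.  Given

       void f (int n) { int a[n]; ... }

   the type of `a' refers to f's PARM_DECL `n', so copying f's body
   must build a remapped ARRAY_TYPE whose bounds use the caller's copy
   of `n'.  A plain type such as `int' is simply entered into the map
   as its own identity.  */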

/* Return previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}

static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;
      tree origin_var = DECL_ORIGIN (old_var);

      if (can_be_nonlocal (old_var, id))
        {
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var)
              && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
            cfun->local_decls = tree_cons (NULL_TREE, old_var,
                                           cfun->local_decls);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}

/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}

/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */
      if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree type, new_tree, old;

              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about the type they point to.  In this case
                 build_fold_indirect_ref wouldn't strip the
                 INDIRECT_REF, but we absolutely rely on that.  As
                 fold_indirect_ref does other useful transformations,
                 try that first, though.  */
              type = TREE_TYPE (TREE_TYPE (*n));
              new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (!*tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ???  We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
        gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the INDIRECT_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
          tree block = id->block;
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
          id->block = block;

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
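
/* Editor's illustration (a sketch): the INDIRECT_REF handling above
   folds away *& pairs created by inlining.  Inlining

       void g (int *p) { *p = 1; }
       ...
       g (&x);

   first substitutes `&x' for `p', giving `*&x = 1', which is then
   reduced to the plain `x = 1'.  */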

/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about the type they point to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              if (id->do_not_unshare)
                new_tree = *n;
              else
                new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ???  We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types or debug stmts.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n);
              new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
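
/* Editor's illustration (a sketch): the MODIFY_EXPR check above
   discards degenerate self-assignments that inlining can create.  If
   the local `a' was mapped to the constant 0, a statement `a = a;'
   would otherwise become the bogus `0 = 0;'; it is replaced by an
   empty statement instead.  */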

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (NULL, new_nr);
}

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval && TREE_CODE (retval) != RESULT_DECL)
        {
          copy = gimple_build_assign (id->retvar, retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      if (gimple_debug_bind_p (stmt))
        {
          copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
                                          gimple_debug_bind_get_value (stmt),
                                          stmt);
          VEC_safe_push (gimple, heap, id->debug_stmts, copy);
          return copy;
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
         RESX and EH_DISPATCH.  */
      if (id->eh_map)
        switch (gimple_code (copy))
          {
          case GIMPLE_CALL:
            {
              tree r, fndecl = gimple_call_fndecl (copy);
              if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
                switch (DECL_FUNCTION_CODE (fndecl))
                  {
                  case BUILT_IN_EH_COPY_VALUES:
                    r = gimple_call_arg (copy, 1);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 1, r);
                    /* FALLTHRU */

                  case BUILT_IN_EH_POINTER:
                  case BUILT_IN_EH_FILTER:
                    r = gimple_call_arg (copy, 0);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 0, r);
                    break;

                  default:
                    break;
                  }
            }
            break;

          case GIMPLE_RESX:
            {
              int r = gimple_resx_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_resx_set_region (copy, r);
            }
            break;

          case GIMPLE_EH_DISPATCH:
            {
              int r = gimple_eh_dispatch_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_eh_dispatch_set_region (copy, r);
            }
            break;

          default:
            break;
          }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  if (gimple_debug_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}

/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     are only of minor importance for driving canonicalize_loop_headers.  */
  freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
        continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
         non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == NOP_EXPR
          && !is_gimple_val (gimple_assign_rhs1 (stmt)))
        {
          tree new_rhs;
          new_rhs = force_gimple_operand_gsi (&seq_gsi,
                                              gimple_assign_rhs1 (stmt),
                                              true, NULL, false, GSI_NEW_STMT);
          gimple_assign_set_rhs1 (stmt, new_rhs);
          id->regimplify = false;
        }

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
        gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block was empty at the start of this iteration,
         call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
        copy_gsi = gsi_start_bb (copy_basic_block);
      else
        gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
         possibly turned the statement into multiple statements, we
         need to process all of them.  */
      do
        {
          tree fn;

          stmt = gsi_stmt (copy_gsi);
          if (is_gimple_call (stmt)
              && gimple_call_va_arg_pack_p (stmt)
              && id->gimple_call)
            {
              /* __builtin_va_arg_pack () should be replaced by
                 all arguments corresponding to ... in the caller.  */
              tree p;
              gimple new_call;
              VEC(tree, heap) *argarray;
              size_t nargs = gimple_call_num_args (id->gimple_call);
              size_t n;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                nargs--;

              /* Create the new array of arguments.  */
              n = nargs + gimple_call_num_args (stmt);
              argarray = VEC_alloc (tree, heap, n);
              VEC_safe_grow (tree, heap, argarray, n);

              /* Copy all the arguments before '...'  */
              memcpy (VEC_address (tree, argarray),
                      gimple_call_arg_ptr (stmt, 0),
                      gimple_call_num_args (stmt) * sizeof (tree));

              /* Append the arguments passed in '...'  */
              memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
                      gimple_call_arg_ptr (id->gimple_call, 0)
                        + (gimple_call_num_args (id->gimple_call) - nargs),
                      nargs * sizeof (tree));

              new_call = gimple_build_call_vec (gimple_call_fn (stmt),
                                                argarray);

              VEC_free (tree, heap, argarray);

              /* Copy all GIMPLE_CALL flags, location and block, except
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, stmt);
              gimple_call_set_va_arg_pack (new_call, false);
              gimple_set_location (new_call, gimple_location (stmt));
              gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

              gsi_replace (&copy_gsi, new_call, false);
              gimple_set_bb (stmt, NULL);
              stmt = new_call;
            }
          else if (is_gimple_call (stmt)
                   && id->gimple_call
                   && (decl = gimple_call_fndecl (stmt))
                   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
            {
              /* __builtin_va_arg_pack_len () should be replaced by
                 the number of anonymous arguments.  */
              size_t nargs = gimple_call_num_args (id->gimple_call);
              tree count, p;
              gimple new_stmt;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                nargs--;

              count = build_int_cst (integer_type_node, nargs);
              new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
              gsi_replace (&copy_gsi, new_stmt, false);
              stmt = new_stmt;
            }

          /* Statements produced by inlining can be unfolded, especially
             when we constant propagated some operands.  We can't fold
             them right now for two reasons:
             1) folding requires SSA_NAME_DEF_STMTs to be correct
             2) we can't change function calls to builtins.
             So we just mark statements for later folding.  We mark
             all new statements, instead of just the statements that have
             changed by some nontrivial substitution, so that even statements
             made foldable indirectly are updated.  If this turns out to be
             expensive, copy_body can be told to watch for nontrivial
             changes.  */
          if (id->statements_to_fold)
            pointer_set_insert (id->statements_to_fold, stmt);

          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (is_gimple_call (stmt))
            {
              struct cgraph_edge *edge;
              int flags;

              switch (id->transform_call_graph_edges)
                {
                case CB_CGE_DUPLICATE:
                  edge = cgraph_edge (id->src_node, orig_stmt);
                  if (edge)
                    {
                      int edge_freq = edge->frequency;
                      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
                                                gimple_uid (stmt),
                                                REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
                                                edge->frequency, true);
                      /* We could also just rescale the frequency, but
                         doing so would introduce roundoff errors and make
                         the verifier unhappy.  */
                      edge->frequency
                        = compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                          copy_basic_block);
                      if (dump_file
                          && profile_status_for_function (cfun) != PROFILE_ABSENT
                          && (edge_freq > edge->frequency + 10
                              || edge_freq < edge->frequency - 10))
                        {
                          fprintf (dump_file, "Edge frequency estimated by "
                                   "cgraph %i diverges from inliner's estimate %i\n",
                                   edge_freq,
                                   edge->frequency);
                          fprintf (dump_file,
                                   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
                                   bb->index,
                                   bb->frequency,
                                   copy_basic_block->frequency);
                        }
                    }
                  break;

                case CB_CGE_MOVE_CLONES:
                  cgraph_set_call_stmt_including_clones (id->dst_node,
                                                         orig_stmt, stmt);
                  edge = cgraph_edge (id->dst_node, stmt);
                  break;

                case CB_CGE_MOVE:
                  edge = cgraph_edge (id->dst_node, orig_stmt);
                  if (edge)
                    cgraph_set_call_stmt (edge, stmt);
                  break;

                default:
                  gcc_unreachable ();
                }

              /* Constant propagation on arguments done during inlining
                 may create new direct calls.  Produce an edge for it.  */
              if ((!edge
                   || (edge->indirect_call
                       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
                  && is_gimple_call (stmt)
                  && (fn = gimple_call_fndecl (stmt)) != NULL)
                {
                  struct cgraph_node *dest = cgraph_node (fn);

                  /* We have a missing edge in the callgraph.  This can happen
                     when previous inlining turned an indirect call into a
                     direct call by constant propagating arguments, or when
                     we are producing a dead clone (for further cloning).  In
                     all other cases we hit a bug (incorrect node sharing is
                     the most common reason for missing edges).  */
                  gcc_assert (dest->needed || !dest->analyzed
                              || !id->src_node->analyzed);
                  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
                    cgraph_create_edge_including_clones
                      (id->dst_node, dest, orig_stmt, stmt, bb->count,
                       compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                       copy_basic_block),
                       bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
                  else
                    cgraph_create_edge (id->dst_node, dest, stmt,
                                        bb->count,
                                        compute_call_stmt_bb_frequency
                                          (id->dst_node->decl, copy_basic_block),
                                        bb->loop_depth)->inline_failed
                      = CIF_ORIGINALLY_INDIRECT_CALL;
                  if (dump_file)
                    {
                      fprintf (dump_file, "Created new direct edge to %s",
                               cgraph_node_name (dest));
                    }
                }

              flags = gimple_call_flags (stmt);
              if (flags & ECF_MAY_BE_ALLOCA)
                cfun->calls_alloca = true;
              if (flags & ECF_RETURNS_TWICE)
                cfun->calls_setjmp = true;
            }

          maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
                                      id->eh_map, id->eh_lp_nr);

          if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
            {
              ssa_op_iter i;
              tree def;

              find_new_referenced_vars (gsi_stmt (copy_gsi));
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                if (TREE_CODE (def) == SSA_NAME)
                  SSA_NAME_DEF_STMT (def) = stmt;
            }

          gsi_next (&copy_gsi);
        }
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}
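
/* Editor's illustration (a sketch, following the documented semantics
   of the two builtins): given

       static inline int f (int a, ...)
       { return g (a, __builtin_va_arg_pack ()); }

   inlining the call `f (1, 2, 3)' replaces __builtin_va_arg_pack ()
   with the anonymous arguments, yielding `g (1, 2, 3)', while a use
   of __builtin_va_arg_pack_len () in the same body would be replaced
   by the constant 2.  */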

/* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
   form is quite easy, since the dominator relationship for the old basic
   blocks does not change.

   There is, however, an exception: inlining might change the dominator
   relation across EH edges that lead from basic blocks within the inlined
   function to landing pads in the function we inline into.

   The function fills in the PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges,
   and we might then want to change the way we build the CFG pre-inlining
   to include all the possible edges.  */
static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
                                  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
        || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
        gimple phi;
        gimple_stmt_iterator si;

        if (!nonlocal_goto)
          gcc_assert (e->flags & EDGE_EH);

        if (!can_throw)
          gcc_assert (!(e->flags & EDGE_EH));

        for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
          {
            edge re;

            phi = gsi_stmt (si);

            /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
            gcc_assert (!e->dest->aux);

            gcc_assert ((e->flags & EDGE_EH)
                        || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

            if (!is_gimple_reg (PHI_RESULT (phi)))
              {
                mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
                continue;
              }

            re = find_edge (ret_bb, e->dest);
            gcc_assert (re);
            gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
                        == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

            SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
                     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
          }
      }
}
1810 /* Copy edges from BB into its copy constructed earlier, scale profile
1811 accordingly. Edges will be taken care of later. Assume aux
1812 pointers to point to the copies of each BB. */
1814 static void
1815 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1817 basic_block new_bb = (basic_block) bb->aux;
1818 edge_iterator ei;
1819 edge old_edge;
1820 gimple_stmt_iterator si;
1821 int flags;
1823 /* Use the indices from the original blocks to create edges for the
1824 new ones. */
1825 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1826 if (!(old_edge->flags & EDGE_EH))
1828 edge new_edge;
1830 flags = old_edge->flags;
1832 /* Return edges do get a FALLTHRU flag when the get inlined. */
1833 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1834 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1835 flags |= EDGE_FALLTHRU;
1836 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1837 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1838 new_edge->probability = old_edge->probability;
1841 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1842 return;
1844 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1846 gimple copy_stmt;
1847 bool can_throw, nonlocal_goto;
1849 copy_stmt = gsi_stmt (si);
1850 if (!is_gimple_debug (copy_stmt))
1852 update_stmt (copy_stmt);
1853 if (gimple_in_ssa_p (cfun))
1854 mark_symbols_for_renaming (copy_stmt);
1857 /* Do this before the possible split_block. */
1858 gsi_next (&si);
1860 /* If this tree could throw an exception, there are two
1861 cases where we need to add abnormal edge(s): the
1862 tree wasn't in a region and there is a "current
1863 region" in the caller; or the original tree had
1864 EH edges. In both cases split the block after the tree,
1865 and add abnormal edge(s) as needed; we need both
1866 those from the callee and the caller.
1867 We check whether the copy can throw, because the const
1868 propagation can change an INDIRECT_REF which throws
1869 into a COMPONENT_REF which doesn't. If the copy
1870 can throw, the original could also throw. */
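/* A minimal sketch of the split performed below, with made-up statements:

     before:  BB:  x = *p_1;  y = x + 1;	// *p_1 may throw
     after:   BB:  x = *p_1;			// EH edges leave after BB
	      BB': y = x + 1;

   The statements following the throwing one are moved to a fresh block
   so the EH edge(s) can be attached right after the throwing statement.  */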
1871 can_throw = stmt_can_throw_internal (copy_stmt);
1872 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1874 if (can_throw || nonlocal_goto)
1876 if (!gsi_end_p (si))
1877 /* Note that bb's predecessor edges aren't necessarily
1878 right at this point; split_block doesn't care. */
1880 edge e = split_block (new_bb, copy_stmt);
1882 new_bb = e->dest;
1883 new_bb->aux = e->src->aux;
1884 si = gsi_start_bb (new_bb);
1888 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1889 make_eh_dispatch_edges (copy_stmt);
1890 else if (can_throw)
1891 make_eh_edges (copy_stmt);
1893 if (nonlocal_goto)
1894 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1896 if ((can_throw || nonlocal_goto)
1897 && gimple_in_ssa_p (cfun))
1898 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1899 can_throw, nonlocal_goto);
1903 /* Copy the PHIs. All blocks and edges are copied; some blocks
1904 may have been split and new outgoing EH edges inserted.
1905 BB points to the block of the original function and AUX pointers
1906 link the original and newly copied blocks. */
1908 static void
1909 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1911 basic_block const new_bb = (basic_block) bb->aux;
1912 edge_iterator ei;
1913 gimple phi;
1914 gimple_stmt_iterator si;
1916 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1918 tree res, new_res;
1919 gimple new_phi;
1920 edge new_edge;
1922 phi = gsi_stmt (si);
1923 res = PHI_RESULT (phi);
1924 new_res = res;
1925 if (is_gimple_reg (res))
1927 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1928 SSA_NAME_DEF_STMT (new_res)
1929 = new_phi = create_phi_node (new_res, new_bb);
1930 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1932 edge const old_edge
1933 = find_edge ((basic_block) new_edge->src->aux, bb);
1934 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1935 tree new_arg = arg;
1936 tree block = id->block;
1937 id->block = NULL_TREE;
1938 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1939 id->block = block;
1940 gcc_assert (new_arg);
1941 /* With return slot optimization we can end up with
1942 non-gimple (foo *)&this->m, fix that here. */
1943 if (TREE_CODE (new_arg) != SSA_NAME
1944 && TREE_CODE (new_arg) != FUNCTION_DECL
1945 && !is_gimple_val (new_arg))
1947 gimple_seq stmts = NULL;
1948 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1949 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
1951 add_phi_arg (new_phi, new_arg, new_edge,
1952 gimple_phi_arg_location_from_edge (phi, old_edge));
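/* Illustration of the return-slot fixup above, with made-up names:
   a remapped PHI argument such as (foo *) &this_2->m is not a valid
   GIMPLE value, so force_gimple_operand turns it into

     tmp_3 = (foo *) &this_2->m;

   inserts that statement on the incoming edge, and tmp_3 becomes the
   PHI argument.  */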
1959 /* Wrapper for remap_decl so it can be used as a callback. */
1961 static tree
1962 remap_decl_1 (tree decl, void *data)
1964 return remap_decl (decl, (copy_body_data *) data);
1967 /* Build the struct function and associated data structures for the new
1968 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1970 static void
1971 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
1973 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1974 gcov_type count_scale;
1976 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1977 count_scale = (REG_BR_PROB_BASE * count
1978 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1979 else
1980 count_scale = REG_BR_PROB_BASE;
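/* Worked example of the scaling above, with made-up numbers: if the
   source function's entry count is 1000 and the clone's expected entry
   count is 250, count_scale is REG_BR_PROB_BASE * 250 / 1000, i.e. a
   25% scale; each copied count below is multiplied by count_scale and
   divided by REG_BR_PROB_BASE.  */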
1982 /* Register specific tree functions. */
1983 gimple_register_cfg_hooks ();
1985 /* Get clean struct function. */
1986 push_struct_function (new_fndecl);
1988 /* We will rebuild these, so just sanity check that they are empty. */
1989 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
1990 gcc_assert (cfun->local_decls == NULL);
1991 gcc_assert (cfun->cfg == NULL);
1992 gcc_assert (cfun->decl == new_fndecl);
1994 /* Copy items we preserve during cloning. */
1995 cfun->static_chain_decl = src_cfun->static_chain_decl;
1996 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
1997 cfun->function_end_locus = src_cfun->function_end_locus;
1998 cfun->curr_properties = src_cfun->curr_properties;
1999 cfun->last_verified = src_cfun->last_verified;
2000 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2001 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2002 cfun->function_frequency = src_cfun->function_frequency;
2003 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2004 cfun->stdarg = src_cfun->stdarg;
2005 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
2006 cfun->after_inlining = src_cfun->after_inlining;
2007 cfun->returns_struct = src_cfun->returns_struct;
2008 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2009 cfun->after_tree_profile = src_cfun->after_tree_profile;
2011 init_empty_tree_cfg ();
2013 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2014 ENTRY_BLOCK_PTR->count =
2015 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2016 REG_BR_PROB_BASE);
2017 ENTRY_BLOCK_PTR->frequency
2018 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2019 EXIT_BLOCK_PTR->count =
2020 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2021 REG_BR_PROB_BASE);
2022 EXIT_BLOCK_PTR->frequency =
2023 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2024 if (src_cfun->eh)
2025 init_eh_for_function ();
2027 if (src_cfun->gimple_df)
2029 init_tree_ssa (cfun);
2030 cfun->gimple_df->in_ssa_p = true;
2031 init_ssa_operands ();
2033 pop_cfun ();
2036 /* Make a copy of the body of FN so that it can be inserted inline in
2037 another function. Walks FN via its CFG and returns the new fndecl. */
2039 static tree
2040 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2041 basic_block entry_block_map, basic_block exit_block_map)
2043 tree callee_fndecl = id->src_fn;
2044 /* Original cfun for the callee, doesn't change. */
2045 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2046 struct function *cfun_to_copy;
2047 basic_block bb;
2048 tree new_fndecl = NULL;
2049 gcov_type count_scale;
2050 int last;
2052 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2053 count_scale = (REG_BR_PROB_BASE * count
2054 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2055 else
2056 count_scale = REG_BR_PROB_BASE;
2058 /* Register specific tree functions. */
2059 gimple_register_cfg_hooks ();
2061 /* Must have a CFG at this point. */
2062 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2063 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2065 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2067 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2068 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2069 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2070 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2072 /* Duplicate any exception-handling regions. */
2073 if (cfun->eh)
2074 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2075 remap_decl_1, id);
2077 /* Use aux pointers to map the original blocks to their copies. */
2078 FOR_EACH_BB_FN (bb, cfun_to_copy)
2080 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2081 bb->aux = new_bb;
2082 new_bb->aux = bb;
2085 last = last_basic_block;
2087 /* Now that we've duplicated the blocks, duplicate their edges. */
2088 FOR_ALL_BB_FN (bb, cfun_to_copy)
2089 copy_edges_for_bb (bb, count_scale, exit_block_map);
2091 if (gimple_in_ssa_p (cfun))
2092 FOR_ALL_BB_FN (bb, cfun_to_copy)
2093 copy_phis_for_bb (bb, id);
2095 FOR_ALL_BB_FN (bb, cfun_to_copy)
2097 ((basic_block)bb->aux)->aux = NULL;
2098 bb->aux = NULL;
2101 /* Zero out AUX fields of blocks newly created during EH edge
2102 insertion. */
2103 for (; last < last_basic_block; last++)
2104 BASIC_BLOCK (last)->aux = NULL;
2105 entry_block_map->aux = NULL;
2106 exit_block_map->aux = NULL;
2108 if (id->eh_map)
2110 pointer_map_destroy (id->eh_map);
2111 id->eh_map = NULL;
2114 return new_fndecl;
2117 /* Copy the debug STMT using ID. We deal with these statements in a
2118 special way: if any variable in their VALUE expression wasn't
2119 remapped yet, we won't remap it, because that would get decl uids
2120 out of sync, causing codegen differences between -g and -g0. If
2121 this arises, we drop the VALUE expression altogether. */
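/* Hypothetical example: given a debug bind copied from the callee

     # DEBUG callee_var => unmapped_var + 1

   if unmapped_var was never remapped while copying the body (it occurs
   only in debug stmts), remapping it now would allocate a new decl and
   shift decl uids between -g and -g0, so the bind is instead reset to

     # DEBUG callee_var => NULL  */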
2123 static void
2124 copy_debug_stmt (gimple stmt, copy_body_data *id)
2126 tree t, *n;
2127 struct walk_stmt_info wi;
2129 t = id->block;
2130 if (gimple_block (stmt))
2132 tree *n;
2133 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2134 if (n)
2135 t = *n;
2137 gimple_set_block (stmt, t);
2139 /* Remap all the operands in COPY. */
2140 memset (&wi, 0, sizeof (wi));
2141 wi.info = id;
2143 processing_debug_stmt = 1;
2145 t = gimple_debug_bind_get_var (stmt);
2147 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2148 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2150 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2151 t = *n;
2153 else if (TREE_CODE (t) == VAR_DECL
2154 && !TREE_STATIC (t)
2155 && gimple_in_ssa_p (cfun)
2156 && !pointer_map_contains (id->decl_map, t)
2157 && !var_ann (t))
2158 /* T is a non-localized variable. */;
2159 else
2160 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2162 gimple_debug_bind_set_var (stmt, t);
2164 if (gimple_debug_bind_has_value_p (stmt))
2165 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2166 remap_gimple_op_r, &wi, NULL);
2168 /* Punt if any decl couldn't be remapped. */
2169 if (processing_debug_stmt < 0)
2170 gimple_debug_bind_reset_value (stmt);
2172 processing_debug_stmt = 0;
2174 update_stmt (stmt);
2175 if (gimple_in_ssa_p (cfun))
2176 mark_symbols_for_renaming (stmt);
2179 /* Process deferred debug stmts. In order to give values better odds
2180 of being successfully remapped, we delay the processing of debug
2181 stmts until all other stmts that might require remapping are
2182 processed. */
2184 static void
2185 copy_debug_stmts (copy_body_data *id)
2187 size_t i;
2188 gimple stmt;
2190 if (!id->debug_stmts)
2191 return;
2193 for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
2194 copy_debug_stmt (stmt, id);
2196 VEC_free (gimple, heap, id->debug_stmts);
2199 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2200 another function. */
2202 static tree
2203 copy_tree_body (copy_body_data *id)
2205 tree fndecl = id->src_fn;
2206 tree body = DECL_SAVED_TREE (fndecl);
2208 walk_tree (&body, copy_tree_body_r, id, NULL);
2210 return body;
2213 /* Make a copy of the body of FN so that it can be inserted inline in
2214 another function. */
2216 static tree
2217 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2218 basic_block entry_block_map, basic_block exit_block_map)
2220 tree fndecl = id->src_fn;
2221 tree body;
2223 /* If this body has a CFG, walk CFG and copy. */
2224 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2225 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map);
2226 copy_debug_stmts (id);
2228 return body;
2231 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2232 defined in function FN, or of a data member thereof. */
2234 static bool
2235 self_inlining_addr_expr (tree value, tree fn)
2237 tree var;
2239 if (TREE_CODE (value) != ADDR_EXPR)
2240 return false;
2242 var = get_base_address (TREE_OPERAND (value, 0));
2244 return var && auto_var_in_fn_p (var, fn);
2247 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2248 lexical block and line number information from BASE_STMT, if given,
2249 or from the last stmt of the block otherwise. */
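/* For instance (made-up names), after substituting the argument a_5
   for parameter P, a statement like

     # DEBUG p => a_5

   is appended to BB so debuggers can still display P inside the
   inlined frame.  */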
2251 static gimple
2252 insert_init_debug_bind (copy_body_data *id,
2253 basic_block bb, tree var, tree value,
2254 gimple base_stmt)
2256 gimple note;
2257 gimple_stmt_iterator gsi;
2258 tree tracked_var;
2260 if (!gimple_in_ssa_p (id->src_cfun))
2261 return NULL;
2263 if (!MAY_HAVE_DEBUG_STMTS)
2264 return NULL;
2266 tracked_var = target_for_debug_bind (var);
2267 if (!tracked_var)
2268 return NULL;
2270 if (bb)
2272 gsi = gsi_last_bb (bb);
2273 if (!base_stmt && !gsi_end_p (gsi))
2274 base_stmt = gsi_stmt (gsi);
2277 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2279 if (bb)
2281 if (!gsi_end_p (gsi))
2282 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2283 else
2284 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2287 return note;
2290 static void
2291 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2293 /* If VAR represents a zero-sized variable, it's possible that the
2294 assignment statement may result in no gimple statements. */
2295 if (init_stmt)
2297 gimple_stmt_iterator si = gsi_last_bb (bb);
2299 /* We can end up with init statements that store to a non-register
2300 from a rhs with a conversion. Handle that here by forcing the
2301 rhs into a temporary. gimple_regimplify_operands is not
2302 prepared to do this for us. */
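/* E.g. (hypothetical), with MEM a non-register of register type:

     MEM = (int) parm_1;	// RHS not suitable for a store

   is rewritten below into

     tmp_2 = (int) parm_1;
     MEM = tmp_2;  */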
2303 if (!is_gimple_debug (init_stmt)
2304 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2305 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2306 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2308 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2309 gimple_expr_type (init_stmt),
2310 gimple_assign_rhs1 (init_stmt));
2311 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2312 GSI_NEW_STMT);
2313 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2314 gimple_assign_set_rhs1 (init_stmt, rhs);
2316 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2317 gimple_regimplify_operands (init_stmt, &si);
2318 mark_symbols_for_renaming (init_stmt);
2320 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2322 tree var, def = gimple_assign_lhs (init_stmt);
2324 if (TREE_CODE (def) == SSA_NAME)
2325 var = SSA_NAME_VAR (def);
2326 else
2327 var = def;
2329 insert_init_debug_bind (id, bb, var, def, init_stmt);
2334 /* Initialize parameter P with VALUE. If needed, produce an init statement
2335 at the end of BB. When BB is NULL, we return the init statement to be
2336 output later. */
2337 static gimple
2338 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2339 basic_block bb, tree *vars)
2341 gimple init_stmt = NULL;
2342 tree var;
2343 tree rhs = value;
2344 tree def = (gimple_in_ssa_p (cfun)
2345 ? gimple_default_def (id->src_cfun, p) : NULL);
2347 if (value
2348 && value != error_mark_node
2349 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2351 if (fold_convertible_p (TREE_TYPE (p), value))
2352 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2353 else
2354 /* ??? For valid (GIMPLE) programs we should not end up here.
2355 Still if something has gone wrong and we end up with truly
2356 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2357 to not leak invalid GIMPLE to the following passes. */
2358 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2361 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2362 here since the type of this decl must be visible to the calling
2363 function. */
2364 var = copy_decl_to_var (p, id);
2366 /* We're actually using the newly-created var. */
2367 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2369 get_var_ann (var);
2370 add_referenced_var (var);
2373 /* Declare this new variable. */
2374 TREE_CHAIN (var) = *vars;
2375 *vars = var;
2377 /* Make gimplifier happy about this variable. */
2378 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2380 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2381 we would not need to create a new variable here at all, if it
2382 weren't for debug info. Still, we can just use the argument
2383 value. */
2384 if (TREE_READONLY (p)
2385 && !TREE_ADDRESSABLE (p)
2386 && value && !TREE_SIDE_EFFECTS (value)
2387 && !def)
2389 /* We may produce non-gimple trees by adding NOPs or introduce
2390 invalid sharing when the operand is not really constant.
2391 It is no big deal to prohibit constant propagation here, as
2392 we will constant propagate in the DOM1 pass anyway. */
2393 if (is_gimple_min_invariant (value)
2394 && useless_type_conversion_p (TREE_TYPE (p),
2395 TREE_TYPE (value))
2396 /* We have to be very careful about ADDR_EXPR. Make sure
2397 the base variable isn't a local variable of the inlined
2398 function, e.g., when doing recursive inlining, direct or
2399 mutually-recursive or whatever, which is why we don't
2400 just test whether fn == current_function_decl. */
2401 && ! self_inlining_addr_expr (value, fn))
2403 insert_decl_map (id, p, value);
2404 insert_debug_decl_map (id, p, var);
2405 return insert_init_debug_bind (id, bb, var, value, NULL);
2409 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2410 that way, when the PARM_DECL is encountered, it will be
2411 automatically replaced by the VAR_DECL. */
2412 insert_decl_map (id, p, var);
2414 /* Even if P was TREE_READONLY, the new VAR should not be.
2415 In the original code, we would have constructed a
2416 temporary, and then the function body would have never
2417 changed the value of P. However, now, we will be
2418 constructing VAR directly. The constructor body may
2419 change its value multiple times as it is being
2420 constructed. Therefore, it must not be TREE_READONLY;
2421 the back-end assumes that TREE_READONLY variable is
2422 assigned to only once. */
2423 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2424 TREE_READONLY (var) = 0;
2426 /* If there is no setup required and we are in SSA, take the easy route
2427 replacing all SSA names representing the function parameter by the
2428 SSA name passed to the function.
2430 We need to construct a map for the variable anyway, as it might be
2431 used in different SSA names when the parameter is set in the function.
2433 Do the replacement at -O0 for const arguments replaced by a constant.
2434 This is important for builtin_constant_p and other constructs requiring
2435 the constant argument to be visible in the inlined function body. */
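/* Hypothetical example of why the -O0 case matters:

     static inline int is_const (int x)
     { return __builtin_constant_p (x); }

   When is_const (42) is inlined (e.g. forced via always_inline),
   mapping the parameter's default SSA name directly to 42 lets
   __builtin_constant_p evaluate to 1 even without optimization.  */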
2436 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2437 && (optimize
2438 || (TREE_READONLY (p)
2439 && is_gimple_min_invariant (rhs)))
2440 && (TREE_CODE (rhs) == SSA_NAME
2441 || is_gimple_min_invariant (rhs))
2442 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2444 insert_decl_map (id, def, rhs);
2445 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2448 /* If the value of the argument is never used, don't bother initializing
2449 it. */
2450 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2452 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2453 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2456 /* Initialize this VAR_DECL from the equivalent argument. Convert
2457 the argument to the proper type in case it was promoted. */
2458 if (value)
2460 if (rhs == error_mark_node)
2462 insert_decl_map (id, p, var);
2463 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2466 STRIP_USELESS_TYPE_CONVERSION (rhs);
2468 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2469 keep our trees in gimple form. */
2470 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2472 def = remap_ssa_name (def, id);
2473 init_stmt = gimple_build_assign (def, rhs);
2474 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2475 set_default_def (var, NULL);
2477 else
2478 init_stmt = gimple_build_assign (var, rhs);
2480 if (bb && init_stmt)
2481 insert_init_stmt (id, bb, init_stmt);
2483 return init_stmt;
2486 /* Generate code to initialize the parameters of the function at the
2487 top of the stack in ID from the GIMPLE_CALL STMT. */
2489 static void
2490 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2491 tree fn, basic_block bb)
2493 tree parms;
2494 size_t i;
2495 tree p;
2496 tree vars = NULL_TREE;
2497 tree static_chain = gimple_call_chain (stmt);
2499 /* Figure out what the parameters are. */
2500 parms = DECL_ARGUMENTS (fn);
2502 /* Loop through the parameter declarations, replacing each with an
2503 equivalent VAR_DECL, appropriately initialized. */
2504 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2506 tree val;
2507 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2508 setup_one_parameter (id, p, val, fn, bb, &vars);
2511 /* Initialize the static chain. */
2512 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2513 gcc_assert (fn != current_function_decl);
2514 if (p)
2516 /* No static chain? Seems like a bug in tree-nested.c. */
2517 gcc_assert (static_chain);
2519 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2522 declare_inline_vars (id->block, vars);
2526 /* Declare a return variable to replace the RESULT_DECL for the
2527 function we are calling. An appropriate DECL_STMT is returned.
2528 The USE_STMT is filled to contain a use of the declaration to
2529 indicate the return value of the function.
2531 RETURN_SLOT, if non-null, is the place where to store the result. It
2532 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2533 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2535 The return value is a (possibly null) value that holds the result
2536 as seen by the caller. */
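/* For instance, for hypothetical caller statements:

     a = foo ();	// MODIFY_DEST is 'a', RETURN_SLOT is NULL
     s = bar ();	// with CALL_EXPR_RETURN_SLOT_OPT, RETURN_SLOT
			// is 's' and MODIFY_DEST is NULL

   The callee's RESULT_DECL is mapped to the chosen variable, and the
   returned use expression is later assigned to the call's LHS.  */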
2538 static tree
2539 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest)
2541 tree callee = id->src_fn;
2542 tree caller = id->dst_fn;
2543 tree result = DECL_RESULT (callee);
2544 tree callee_type = TREE_TYPE (result);
2545 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
2546 tree var, use;
2548 /* We don't need to do anything for functions that don't return
2549 anything. */
2550 if (!result || VOID_TYPE_P (callee_type))
2551 return NULL_TREE;
2553 /* If there was a return slot, then the return value is the
2554 dereferenced address of that object. */
2555 if (return_slot)
2557 /* The front end shouldn't have used both return_slot and
2558 a modify expression. */
2559 gcc_assert (!modify_dest);
2560 if (DECL_BY_REFERENCE (result))
2562 tree return_slot_addr = build_fold_addr_expr (return_slot);
2563 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2565 /* We are going to construct *&return_slot and we can't do that
2566 for variables believed not to be addressable.
2568 FIXME: This check can possibly match, because values returned
2569 via the return slot optimization are not believed to have their
2570 address taken by alias analysis. */
2571 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2572 if (gimple_in_ssa_p (cfun))
2574 HOST_WIDE_INT bitsize;
2575 HOST_WIDE_INT bitpos;
2576 tree offset;
2577 enum machine_mode mode;
2578 int unsignedp;
2579 int volatilep;
2580 tree base;
2581 base = get_inner_reference (return_slot, &bitsize, &bitpos,
2582 &offset,
2583 &mode, &unsignedp, &volatilep,
2584 false);
2585 if (TREE_CODE (base) == INDIRECT_REF)
2586 base = TREE_OPERAND (base, 0);
2587 if (TREE_CODE (base) == SSA_NAME)
2588 base = SSA_NAME_VAR (base);
2589 mark_sym_for_renaming (base);
2591 var = return_slot_addr;
2593 else
2595 var = return_slot;
2596 gcc_assert (TREE_CODE (var) != SSA_NAME);
2597 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2599 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2600 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2601 && !DECL_GIMPLE_REG_P (result)
2602 && DECL_P (var))
2603 DECL_GIMPLE_REG_P (var) = 0;
2604 use = NULL;
2605 goto done;
2608 /* All types requiring non-trivial constructors should have been handled. */
2609 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2611 /* Attempt to avoid creating a new temporary variable. */
2612 if (modify_dest
2613 && TREE_CODE (modify_dest) != SSA_NAME)
2615 bool use_it = false;
2617 /* We can't use MODIFY_DEST if there's type promotion involved. */
2618 if (!useless_type_conversion_p (callee_type, caller_type))
2619 use_it = false;
2621 /* ??? If we're assigning to a variable sized type, then we must
2622 reuse the destination variable, because we've no good way to
2623 create variable sized temporaries at this point. */
2624 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2625 use_it = true;
2627 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2628 reuse it as the result of the call directly. Don't do this if
2629 it would promote MODIFY_DEST to addressable. */
2630 else if (TREE_ADDRESSABLE (result))
2631 use_it = false;
2632 else
2634 tree base_m = get_base_address (modify_dest);
2636 /* If the base isn't a decl, then it's a pointer, and we don't
2637 know where that's going to go. */
2638 if (!DECL_P (base_m))
2639 use_it = false;
2640 else if (is_global_var (base_m))
2641 use_it = false;
2642 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2643 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2644 && !DECL_GIMPLE_REG_P (result)
2645 && DECL_GIMPLE_REG_P (base_m))
2646 use_it = false;
2647 else if (!TREE_ADDRESSABLE (base_m))
2648 use_it = true;
2651 if (use_it)
2653 var = modify_dest;
2654 use = NULL;
2655 goto done;
2659 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2661 var = copy_result_decl_to_var (result, id);
2662 if (gimple_in_ssa_p (cfun))
2664 get_var_ann (var);
2665 add_referenced_var (var);
2668 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2669 DECL_STRUCT_FUNCTION (caller)->local_decls
2670 = tree_cons (NULL_TREE, var,
2671 DECL_STRUCT_FUNCTION (caller)->local_decls);
2673 /* Do not have the rest of GCC warn about this variable as it should
2674 not be visible to the user. */
2675 TREE_NO_WARNING (var) = 1;
2677 declare_inline_vars (id->block, var);
2679 /* Build the use expr. If the return type of the function was
2680 promoted, convert it back to the expected type. */
2681 use = var;
2682 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2683 use = fold_convert (caller_type, var);
2685 STRIP_USELESS_TYPE_CONVERSION (use);
2687 if (DECL_BY_REFERENCE (result))
2689 TREE_ADDRESSABLE (var) = 1;
2690 var = build_fold_addr_expr (var);
2693 done:
2694 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2695 way, when the RESULT_DECL is encountered, it will be
2696 automatically replaced by the VAR_DECL. */
2697 insert_decl_map (id, result, var);
2699 /* Remember this so we can ignore it in remap_decls. */
2700 id->retvar = var;
2702 return use;
2705 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2706 to a local label. */
2708 static tree
2709 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2711 tree node = *nodep;
2712 tree fn = (tree) fnp;
2714 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2715 return node;
2717 if (TYPE_P (node))
2718 *walk_subtrees = 0;
2720 return NULL_TREE;
2723 /* Determine if the function can be copied. If so return NULL. If
2724 not return a string describing the reason for failure. */
2726 static const char *
2727 copy_forbidden (struct function *fun, tree fndecl)
2729 const char *reason = fun->cannot_be_copied_reason;
2730 tree step;
2732 /* Only examine the function once. */
2733 if (fun->cannot_be_copied_set)
2734 return reason;
2736 /* We cannot copy a function that receives a non-local goto
2737 because we cannot remap the destination label used in the
2738 function that is performing the non-local goto. */
2739 /* ??? Actually, this should be possible, if we work at it.
2740 No doubt there's just a handful of places that simply
2741 assume it doesn't happen and don't substitute properly. */
2742 if (fun->has_nonlocal_label)
2744 reason = G_("function %q+F can never be copied "
2745 "because it receives a non-local goto");
2746 goto fail;
2749 for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2751 tree decl = TREE_VALUE (step);
2753 if (TREE_CODE (decl) == VAR_DECL
2754 && TREE_STATIC (decl)
2755 && !DECL_EXTERNAL (decl)
2756 && DECL_INITIAL (decl)
2757 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2758 has_label_address_in_static_1,
2759 fndecl))
2761 reason = G_("function %q+F can never be copied because it saves "
2762 "address of local label in a static variable");
2763 goto fail;
2767 fail:
2768 fun->cannot_be_copied_reason = reason;
2769 fun->cannot_be_copied_set = true;
2770 return reason;
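/* A hypothetical function that trips the second check above:

     int f (int i)
     {
       static void *tbl[] = { &&l0, &&l1 };	// label addresses saved
       goto *tbl[i];				// in a static initializer
     l0: return 0;
     l1: return 1;
     }

   Copying the body cannot remap &&l0 and &&l1 stored in TBL's
   DECL_INITIAL, so F can never be copied.  */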
2774 static const char *inline_forbidden_reason;
2776 /* A callback for walk_gimple_seq to handle statements. Returns non-null
2777 iff a function cannot be inlined. Also sets the reason why. */
2779 static tree
2780 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2781 struct walk_stmt_info *wip)
2783 tree fn = (tree) wip->info;
2784 tree t;
2785 gimple stmt = gsi_stmt (*gsi);
2787 switch (gimple_code (stmt))
2789 case GIMPLE_CALL:
2790 /* Refuse to inline an alloca call unless the user explicitly forced it, as
2791 this may change the program's memory overhead drastically when the
2792 function using alloca is called in a loop. In the GCC present in
2793 SPEC2000, inlining into schedule_block caused it to require 2GB of
2794 RAM instead of 256MB. */
2795 if (gimple_alloca_call_p (stmt)
2796 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2798 inline_forbidden_reason
2799 = G_("function %q+F can never be inlined because it uses "
2800 "alloca (override using the always_inline attribute)");
2801 *handled_ops_p = true;
2802 return fn;
2805 t = gimple_call_fndecl (stmt);
2806 if (t == NULL_TREE)
2807 break;
2809 /* We cannot inline functions that call setjmp. */
2810 if (setjmp_call_p (t))
2812 inline_forbidden_reason
2813 = G_("function %q+F can never be inlined because it uses setjmp");
2814 *handled_ops_p = true;
2815 return t;
2818 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2819 switch (DECL_FUNCTION_CODE (t))
2821 /* We cannot inline functions that take a variable number of
2822 arguments. */
2823 case BUILT_IN_VA_START:
2824 case BUILT_IN_NEXT_ARG:
2825 case BUILT_IN_VA_END:
2826 inline_forbidden_reason
2827 = G_("function %q+F can never be inlined because it "
2828 "uses variable argument lists");
2829 *handled_ops_p = true;
2830 return t;
2832 case BUILT_IN_LONGJMP:
2833 /* We can't inline functions that call __builtin_longjmp at
2834 all. The non-local goto machinery really requires the
2835 destination be in a different function. If we allow the
2836 function calling __builtin_longjmp to be inlined into the
2837 function calling __builtin_setjmp, Things will Go Awry. */
2838 inline_forbidden_reason
2839 = G_("function %q+F can never be inlined because "
2840 "it uses setjmp-longjmp exception handling");
2841 *handled_ops_p = true;
2842 return t;
2844 case BUILT_IN_NONLOCAL_GOTO:
2845 /* Similarly. */
2846 inline_forbidden_reason
2847 = G_("function %q+F can never be inlined because "
2848 "it uses non-local goto");
2849 *handled_ops_p = true;
2850 return t;
2852 case BUILT_IN_RETURN:
2853 case BUILT_IN_APPLY_ARGS:
2854 /* If a __builtin_apply_args caller would be inlined,
2855 it would be saving arguments of the function it has
2856 been inlined into. Similarly, __builtin_return would
2857 return from the function into which it has been inlined. */
2858 inline_forbidden_reason
2859 = G_("function %q+F can never be inlined because "
2860 "it uses __builtin_return or __builtin_apply_args");
2861 *handled_ops_p = true;
2862 return t;
2864 default:
2865 break;
2867 break;
2869 case GIMPLE_GOTO:
2870 t = gimple_goto_dest (stmt);
2872 /* We will not inline a function which uses computed goto. The
2873 addresses of its local labels, which may be tucked into
2874 global storage, are of course not constant across
2875 instantiations, which causes unexpected behavior. */
2876 if (TREE_CODE (t) != LABEL_DECL)
2878 inline_forbidden_reason
2879 = G_("function %q+F can never be inlined "
2880 "because it contains a computed goto");
2881 *handled_ops_p = true;
2882 return t;
2884 break;
2886 default:
2887 break;
2890 *handled_ops_p = false;
2891 return NULL_TREE;
2894 /* Return true if FNDECL is a function that cannot be inlined into
2895 another one. */
2897 static bool
2898 inline_forbidden_p (tree fndecl)
2900 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
2901 struct walk_stmt_info wi;
2902 struct pointer_set_t *visited_nodes;
2903 basic_block bb;
2904 bool forbidden_p = false;
2906 /* First check for shared reasons not to copy the code. */
2907 inline_forbidden_reason = copy_forbidden (fun, fndecl);
2908 if (inline_forbidden_reason != NULL)
2909 return true;
2911 /* Next, walk the statements of the function looking for
2912 constructs we can't handle, or that are non-optimal for inlining. */
2913 visited_nodes = pointer_set_create ();
2914 memset (&wi, 0, sizeof (wi));
2915 wi.info = (void *) fndecl;
2916 wi.pset = visited_nodes;
2918 FOR_EACH_BB_FN (bb, fun)
2920 gimple ret;
2921 gimple_seq seq = bb_seq (bb);
2922 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
2923 forbidden_p = (ret != NULL);
2924 if (forbidden_p)
2925 break;
2928 pointer_set_destroy (visited_nodes);
2929 return forbidden_p;
2932 /* Returns nonzero if FN is a function that does not have any
2933 fundamental inline blocking properties. */
2935 bool
2936 tree_inlinable_function_p (tree fn)
2938 bool inlinable = true;
2939 bool do_warning;
2940 tree always_inline;
2942 /* If we've already decided this function shouldn't be inlined,
2943 there's no need to check again. */
2944 if (DECL_UNINLINABLE (fn))
2945 return false;
2947 /* We only warn for functions declared `inline' by the user. */
2948 do_warning = (warn_inline
2949 && DECL_DECLARED_INLINE_P (fn)
2950 && !DECL_NO_INLINE_WARNING_P (fn)
2951 && !DECL_IN_SYSTEM_HEADER (fn));
2953 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
2955 if (flag_no_inline
2956 && always_inline == NULL)
2958 if (do_warning)
2959 warning (OPT_Winline, "function %q+F can never be inlined because it "
2960 "is suppressed using -fno-inline", fn);
2961 inlinable = false;
2964 /* Don't auto-inline anything that might not be bound within
2965 this unit of translation. */
2966 else if (!DECL_DECLARED_INLINE_P (fn)
2967 && DECL_REPLACEABLE_P (fn))
2968 inlinable = false;
2970 else if (!function_attribute_inlinable_p (fn))
2972 if (do_warning)
2973 warning (OPT_Winline, "function %q+F can never be inlined because it "
2974 "uses attributes conflicting with inlining", fn);
2975 inlinable = false;
2978 else if (inline_forbidden_p (fn))
2980 /* See if we should warn about uninlinable functions. Previously,
2981 some of these warnings would be issued while trying to expand
2982 the function inline, but that would cause multiple warnings
2983 about functions that would for example call alloca. But since
2984 this is a property of the function, just one warning is enough.
2985 As a bonus we can now give more details about the reason why a
2986 function is not inlinable. */
2987 if (always_inline)
2988 sorry (inline_forbidden_reason, fn);
2989 else if (do_warning)
2990 warning (OPT_Winline, inline_forbidden_reason, fn);
2992 inlinable = false;
2995 /* Squirrel away the result so that we don't have to check again. */
2996 DECL_UNINLINABLE (fn) = !inlinable;
2998 return inlinable;
3001 /* Estimate the cost of a memory move. Use the machine-dependent
3002 word size and take a possible memcpy call into account. */
3004 int
3005 estimate_move_cost (tree type)
3007 HOST_WIDE_INT size;
3009 gcc_assert (!VOID_TYPE_P (type));
3011 size = int_size_in_bytes (type);
3013 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3014 /* Cost of a memcpy call, 3 arguments and the call. */
3015 return 4;
3016 else
3017 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
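/* Worked example with made-up target parameters: if MOVE_MAX_PIECES
   is 8 and MOVE_RATIO is 4, a 64-byte structure exceeds 8 * 4 = 32
   bytes and costs 4 (a memcpy call), while a 24-byte structure costs
   (24 + 8 - 1) / 8 = 3 word-sized moves.  */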
3020 /* Returns the cost of operation CODE, according to WEIGHTS. */
3022 static int
3023 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3024 tree op1 ATTRIBUTE_UNUSED, tree op2)
3026 switch (code)
3028 /* These are "free" conversions, or their presumed cost
3029 is folded into other operations. */
3030 case RANGE_EXPR:
3031 CASE_CONVERT:
3032 case COMPLEX_EXPR:
3033 case PAREN_EXPR:
3034 return 0;
3036 /* Assign cost of 1 to usual operations.
3037 ??? We may consider mapping RTL costs to this. */
3038 case COND_EXPR:
3039 case VEC_COND_EXPR:
3041 case PLUS_EXPR:
3042 case POINTER_PLUS_EXPR:
3043 case MINUS_EXPR:
3044 case MULT_EXPR:
3046 case ADDR_SPACE_CONVERT_EXPR:
3047 case FIXED_CONVERT_EXPR:
3048 case FIX_TRUNC_EXPR:
3050 case NEGATE_EXPR:
3051 case FLOAT_EXPR:
3052 case MIN_EXPR:
3053 case MAX_EXPR:
3054 case ABS_EXPR:
3056 case LSHIFT_EXPR:
3057 case RSHIFT_EXPR:
3058 case LROTATE_EXPR:
3059 case RROTATE_EXPR:
3060 case VEC_LSHIFT_EXPR:
3061 case VEC_RSHIFT_EXPR:
3063 case BIT_IOR_EXPR:
3064 case BIT_XOR_EXPR:
3065 case BIT_AND_EXPR:
3066 case BIT_NOT_EXPR:
3068 case TRUTH_ANDIF_EXPR:
3069 case TRUTH_ORIF_EXPR:
3070 case TRUTH_AND_EXPR:
3071 case TRUTH_OR_EXPR:
3072 case TRUTH_XOR_EXPR:
3073 case TRUTH_NOT_EXPR:
3075 case LT_EXPR:
3076 case LE_EXPR:
3077 case GT_EXPR:
3078 case GE_EXPR:
3079 case EQ_EXPR:
3080 case NE_EXPR:
3081 case ORDERED_EXPR:
3082 case UNORDERED_EXPR:
3084 case UNLT_EXPR:
3085 case UNLE_EXPR:
3086 case UNGT_EXPR:
3087 case UNGE_EXPR:
3088 case UNEQ_EXPR:
3089 case LTGT_EXPR:
3091 case CONJ_EXPR:
3093 case PREDECREMENT_EXPR:
3094 case PREINCREMENT_EXPR:
3095 case POSTDECREMENT_EXPR:
3096 case POSTINCREMENT_EXPR:
3098 case REALIGN_LOAD_EXPR:
3100 case REDUC_MAX_EXPR:
3101 case REDUC_MIN_EXPR:
3102 case REDUC_PLUS_EXPR:
3103 case WIDEN_SUM_EXPR:
3104 case WIDEN_MULT_EXPR:
3105 case DOT_PROD_EXPR:
3107 case VEC_WIDEN_MULT_HI_EXPR:
3108 case VEC_WIDEN_MULT_LO_EXPR:
3109 case VEC_UNPACK_HI_EXPR:
3110 case VEC_UNPACK_LO_EXPR:
3111 case VEC_UNPACK_FLOAT_HI_EXPR:
3112 case VEC_UNPACK_FLOAT_LO_EXPR:
3113 case VEC_PACK_TRUNC_EXPR:
3114 case VEC_PACK_SAT_EXPR:
3115 case VEC_PACK_FIX_TRUNC_EXPR:
3116 case VEC_EXTRACT_EVEN_EXPR:
3117 case VEC_EXTRACT_ODD_EXPR:
3118 case VEC_INTERLEAVE_HIGH_EXPR:
3119 case VEC_INTERLEAVE_LOW_EXPR:
3121 return 1;
3123 /* A few special cases of expensive operations. This is useful
3124 to avoid inlining functions having too many of these. */
3125 case TRUNC_DIV_EXPR:
3126 case CEIL_DIV_EXPR:
3127 case FLOOR_DIV_EXPR:
3128 case ROUND_DIV_EXPR:
3129 case EXACT_DIV_EXPR:
3130 case TRUNC_MOD_EXPR:
3131 case CEIL_MOD_EXPR:
3132 case FLOOR_MOD_EXPR:
3133 case ROUND_MOD_EXPR:
3134 case RDIV_EXPR:
3135 if (TREE_CODE (op2) != INTEGER_CST)
3136 return weights->div_mod_cost;
3137 return 1;
3139 default:
3140 /* We expect a copy assignment with no operator. */
3141 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3142 return 0;
3147 /* Estimate number of instructions that will be created by expanding
3148 the statements in the statement sequence STMTS.
3149 WEIGHTS contains weights attributed to various constructs. */
3151 static int
3152 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3154 int cost;
3155 gimple_stmt_iterator gsi;
3157 cost = 0;
3158 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3159 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3161 return cost;
3165 /* Estimate number of instructions that will be created by expanding STMT.
3166 WEIGHTS contains weights attributed to various constructs. */
3168 int
3169 estimate_num_insns (gimple stmt, eni_weights *weights)
3171 unsigned cost, i;
3172 enum gimple_code code = gimple_code (stmt);
3173 tree lhs;
3174 tree rhs;
3176 switch (code)
3178 case GIMPLE_ASSIGN:
3179 /* Try to estimate the cost of assignments. We have two cases to
3180 deal with:
3181 1) Simple assignments to registers;
3182 2) Stores to things that must live in memory. This includes
3183 "normal" stores to scalars, but also assignments of large
3184 structures, or constructors of big arrays;
3186 Let us look at these two cases, assuming we have "a = b + C":
3187 <GIMPLE_ASSIGN <var_decl "a">
3188 <plus_expr <var_decl "b"> <constant C>>
3189 If "a" is a GIMPLE register, the assignment to it is free on almost
3190 any target, because "a" usually ends up in a real register. Hence
3191 the only cost of this expression comes from the PLUS_EXPR, and we
3192 can ignore the GIMPLE_ASSIGN.
3193 If "a" is not a GIMPLE register, the assignment to "a" will most
3194 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3195 of moving something into "a", which we compute using the function
3196 estimate_move_cost. */
3197 lhs = gimple_assign_lhs (stmt);
3198 rhs = gimple_assign_rhs1 (stmt);
3200 if (is_gimple_reg (lhs))
3201 cost = 0;
3202 else
3203 cost = estimate_move_cost (TREE_TYPE (lhs));
3205 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3206 cost += estimate_move_cost (TREE_TYPE (rhs));
3208 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3209 gimple_assign_rhs1 (stmt),
3210 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3211 == GIMPLE_BINARY_RHS
3212 ? gimple_assign_rhs2 (stmt) : NULL);
3213 break;
3215 case GIMPLE_COND:
3216 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3217 gimple_op (stmt, 0),
3218 gimple_op (stmt, 1));
3219 break;
3221 case GIMPLE_SWITCH:
3222 /* Take into account the cost of the switch + guess 2 conditional jumps
3223 for each case label.
3225 TODO: once the switch expansion logic is sufficiently separated, we can
3226 do a better job of estimating the cost of the switch. */
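/* E.g. a switch with 16 labels is estimated at floor_log2 (16) * 2
   = 8 when optimizing for time (a balanced decision tree), but at
   16 * 2 = 32 when estimating size.  */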
3227 if (weights->time_based)
3228 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3229 else
3230 cost = gimple_switch_num_labels (stmt) * 2;
3231 break;
3233 case GIMPLE_CALL:
3235 tree decl = gimple_call_fndecl (stmt);
3236 tree addr = gimple_call_fn (stmt);
3237 tree funtype = TREE_TYPE (addr);
3239 if (POINTER_TYPE_P (funtype))
3240 funtype = TREE_TYPE (funtype);
3242 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3243 cost = weights->target_builtin_call_cost;
3244 else
3245 cost = weights->call_cost;
3247 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3248 switch (DECL_FUNCTION_CODE (decl))
3250 case BUILT_IN_CONSTANT_P:
3251 return 0;
3252 case BUILT_IN_EXPECT:
3253 return 0;
3255 /* Prefetch instruction is not expensive. */
3256 case BUILT_IN_PREFETCH:
3257 cost = weights->target_builtin_call_cost;
3258 break;
3260 /* Exception state returns or moves registers around. */
3261 case BUILT_IN_EH_FILTER:
3262 case BUILT_IN_EH_POINTER:
3263 case BUILT_IN_EH_COPY_VALUES:
3264 return 0;
3266 default:
3267 break;
3270 if (decl)
3271 funtype = TREE_TYPE (decl);
3273 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3274 cost += estimate_move_cost (TREE_TYPE (funtype));
3275 /* Our cost must be kept in sync with
3276 cgraph_estimate_size_after_inlining, which uses the function
3277 declaration to figure out the arguments. */
3278 if (decl && DECL_ARGUMENTS (decl))
3280 tree arg;
3281 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3282 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3283 cost += estimate_move_cost (TREE_TYPE (arg));
3285 else if (funtype && prototype_p (funtype))
3287 tree t;
3288 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3289 t = TREE_CHAIN (t))
3290 if (!VOID_TYPE_P (TREE_VALUE (t)))
3291 cost += estimate_move_cost (TREE_VALUE (t));
3293 else
3295 for (i = 0; i < gimple_call_num_args (stmt); i++)
3297 tree arg = gimple_call_arg (stmt, i);
3298 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3299 cost += estimate_move_cost (TREE_TYPE (arg));
3303 break;
3306 case GIMPLE_GOTO:
3307 case GIMPLE_LABEL:
3308 case GIMPLE_NOP:
3309 case GIMPLE_PHI:
3310 case GIMPLE_RETURN:
3311 case GIMPLE_PREDICT:
3312 case GIMPLE_DEBUG:
3313 return 0;
3315 case GIMPLE_ASM:
3316 return asm_str_count (gimple_asm_string (stmt));
3318 case GIMPLE_RESX:
3319 /* This is either going to be an external function call with one
3320 argument, or two register copy statements plus a goto. */
3321 return 2;
3323 case GIMPLE_EH_DISPATCH:
3324 /* ??? This is going to turn into a switch statement. Ideally
3325 we'd have a look at the eh region and estimate the number of
3326 edges involved. */
3327 return 10;
3329 case GIMPLE_BIND:
3330 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3332 case GIMPLE_EH_FILTER:
3333 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3335 case GIMPLE_CATCH:
3336 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3338 case GIMPLE_TRY:
3339 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3340 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3342 /* OpenMP directives are generally very expensive. */
3344 case GIMPLE_OMP_RETURN:
3345 case GIMPLE_OMP_SECTIONS_SWITCH:
3346 case GIMPLE_OMP_ATOMIC_STORE:
3347 case GIMPLE_OMP_CONTINUE:
3348 /* ...except these, which are cheap. */
3349 return 0;
3351 case GIMPLE_OMP_ATOMIC_LOAD:
3352 return weights->omp_cost;
3354 case GIMPLE_OMP_FOR:
3355 return (weights->omp_cost
3356 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3357 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3359 case GIMPLE_OMP_PARALLEL:
3360 case GIMPLE_OMP_TASK:
3361 case GIMPLE_OMP_CRITICAL:
3362 case GIMPLE_OMP_MASTER:
3363 case GIMPLE_OMP_ORDERED:
3364 case GIMPLE_OMP_SECTION:
3365 case GIMPLE_OMP_SECTIONS:
3366 case GIMPLE_OMP_SINGLE:
3367 return (weights->omp_cost
3368 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3370 default:
3371 gcc_unreachable ();
3374 return cost;
3377 /* Estimate number of instructions that will be created by expanding
3378 function FNDECL. WEIGHTS contains weights attributed to various
3379 constructs. */
3381 int
3382 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3384 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3385 gimple_stmt_iterator bsi;
3386 basic_block bb;
3387 int n = 0;
3389 gcc_assert (my_function && my_function->cfg);
3390 FOR_EACH_BB_FN (bb, my_function)
3392 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3393 n += estimate_num_insns (gsi_stmt (bsi), weights);
3396 return n;
3400 /* Initializes weights used by estimate_num_insns. */
3402 void
3403 init_inline_once (void)
3405 eni_size_weights.call_cost = 1;
3406 eni_size_weights.target_builtin_call_cost = 1;
3407 eni_size_weights.div_mod_cost = 1;
3408 eni_size_weights.omp_cost = 40;
3409 eni_size_weights.time_based = false;
3411 /* Estimating the time for a call is difficult, since we have no idea what the
3412 called function does. In the current uses of eni_time_weights,
3413 underestimating the cost does less harm than overestimating it, so
3414 we choose a rather small value here. */
3415 eni_time_weights.call_cost = 10;
3416 eni_time_weights.target_builtin_call_cost = 10;
3417 eni_time_weights.div_mod_cost = 10;
3418 eni_time_weights.omp_cost = 40;
3419 eni_time_weights.time_based = true;
3422 /* Estimate the number of instructions in a gimple_seq. */
3424 int
3425 count_insns_seq (gimple_seq seq, eni_weights *weights)
3427 gimple_stmt_iterator gsi;
3428 int n = 0;
3429 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3430 n += estimate_num_insns (gsi_stmt (gsi), weights);
3432 return n;
3436 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3438 static void
3439 prepend_lexical_block (tree current_block, tree new_block)
3441 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3442 BLOCK_SUBBLOCKS (current_block) = new_block;
3443 BLOCK_SUPERCONTEXT (new_block) = current_block;
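/* E.g. if CURRENT_BLOCK's subblocks are (B1, B2), then after
   prepend_lexical_block (current_block, new_block) they become
   (NEW_BLOCK, B1, B2), with NEW_BLOCK's supercontext pointing back
   at CURRENT_BLOCK.  */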
3446 /* Fetch the callee declaration from the call graph edge going from NODE and
3447 associated with the STMT call statement. Return NULL_TREE if not found. */
3448 static tree
3449 get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3451 struct cgraph_edge *cs;
3453 cs = cgraph_edge (node, stmt);
3454 if (cs)
3455 return cs->callee->decl;
3457 return NULL_TREE;
3460 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3462 static bool
3463 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3465 tree use_retvar;
3466 tree fn;
3467 struct pointer_map_t *st, *dst;
3468 tree return_slot;
3469 tree modify_dest;
3470 location_t saved_location;
3471 struct cgraph_edge *cg_edge;
3472 cgraph_inline_failed_t reason;
3473 basic_block return_block;
3474 edge e;
3475 gimple_stmt_iterator gsi, stmt_gsi;
3476 bool successfully_inlined = FALSE;
3477 bool purge_dead_abnormal_edges;
3478 tree t_step;
3479 tree var;
3481 /* Set input_location here so we get the right instantiation context
3482 if we call instantiate_decl from inlinable_function_p. */
3483 saved_location = input_location;
3484 if (gimple_has_location (stmt))
3485 input_location = gimple_location (stmt);
3487 /* From here on, we're only interested in CALL_EXPRs. */
3488 if (gimple_code (stmt) != GIMPLE_CALL)
3489 goto egress;
3491 /* First, see if we can figure out what function is being called.
3492 If we cannot, then there is no hope of inlining the function. */
3493 fn = gimple_call_fndecl (stmt);
3494 if (!fn)
3496 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3497 if (!fn)
3498 goto egress;
3501 /* Turn forward declarations into real ones. */
3502 fn = cgraph_node (fn)->decl;
3504 /* If FN is a declaration of a function in a nested scope that was
3505 globally declared inline, we don't set its DECL_INITIAL.
3506 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3507 C++ front-end uses it for cdtors to refer to their internal
3508 declarations, which are not real functions. Fortunately those
3509 don't have trees to be saved, so we can tell by checking their
3510 gimple_body. */
3511 if (!DECL_INITIAL (fn)
3512 && DECL_ABSTRACT_ORIGIN (fn)
3513 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3514 fn = DECL_ABSTRACT_ORIGIN (fn);
3516 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
3517 Kill this check once this is fixed. */
3518 if (!id->dst_node->analyzed)
3519 goto egress;
3521 cg_edge = cgraph_edge (id->dst_node, stmt);
3523 /* Don't inline functions with different EH personalities. */
3524 if (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3525 && DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)
3526 && (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3527 != DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)))
3528 goto egress;
3530 /* Don't try to inline functions that are not well-suited to
3531 inlining. */
3532 if (!cgraph_inline_p (cg_edge, &reason))
3534 /* If this call was originally indirect, we do not want to emit any
3535 inlining-related warnings or sorry messages because there are no
3536 guarantees regarding those. */
3537 if (cg_edge->indirect_call)
3538 goto egress;
3540 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3541 /* Avoid warnings during early inline pass. */
3542 && cgraph_global_info_ready)
3544 sorry ("inlining failed in call to %q+F: %s", fn,
3545 cgraph_inline_failed_string (reason));
3546 sorry ("called from here");
3548 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3549 && !DECL_IN_SYSTEM_HEADER (fn)
3550 && reason != CIF_UNSPECIFIED
3551 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3552 /* Avoid warnings during early inline pass. */
3553 && cgraph_global_info_ready)
3555 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3556 fn, cgraph_inline_failed_string (reason));
3557 warning (OPT_Winline, "called from here");
3559 goto egress;
3561 fn = cg_edge->callee->decl;
3563 #ifdef ENABLE_CHECKING
3564 if (cg_edge->callee->decl != id->dst_node->decl)
3565 verify_cgraph_node (cg_edge->callee);
3566 #endif
3568 /* We will be inlining this callee. */
3569 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3571 /* Update the caller's EH personality. */
3572 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3573 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3574 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3576 /* Split the block holding the GIMPLE_CALL. */
3577 e = split_block (bb, stmt);
3578 bb = e->src;
3579 return_block = e->dest;
3580 remove_edge (e);
3582 /* split_block splits after the statement; work around this by
3583 moving the call into the second block manually. Not pretty,
3584 but seems easier than doing the CFG manipulation by hand
3585 when the GIMPLE_CALL is in the last statement of BB. */
3586 stmt_gsi = gsi_last_bb (bb);
3587 gsi_remove (&stmt_gsi, false);
3589 /* If the GIMPLE_CALL was in the last statement of BB, it may have
3590 been the source of abnormal edges. In this case, schedule
3591 the removal of dead abnormal edges. */
3592 gsi = gsi_start_bb (return_block);
3593 if (gsi_end_p (gsi))
3595 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3596 purge_dead_abnormal_edges = true;
3598 else
3600 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3601 purge_dead_abnormal_edges = false;
3604 stmt_gsi = gsi_start_bb (return_block);
3606 /* Build a block containing code to initialize the arguments, the
3607 actual inline expansion of the body, and a label for the return
3608 statements within the function to jump to. The type of the
3609 statement expression is the return type of the function call. */
3610 id->block = make_node (BLOCK);
3611 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3612 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3613 prepend_lexical_block (gimple_block (stmt), id->block);
3615 /* Local declarations will be replaced by their equivalents in this
3616 map. */
3617 st = id->decl_map;
3618 id->decl_map = pointer_map_create ();
3619 dst = id->debug_map;
3620 id->debug_map = NULL;
3622 /* Record the function we are about to inline. */
3623 id->src_fn = fn;
3624 id->src_node = cg_edge->callee;
3625 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3626 id->gimple_call = stmt;
3628 gcc_assert (!id->src_cfun->after_inlining);
3630 id->entry_bb = bb;
3631 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3633 gimple_stmt_iterator si = gsi_last_bb (bb);
3634 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3635 NOT_TAKEN),
3636 GSI_NEW_STMT);
3638 initialize_inlined_parameters (id, stmt, fn, bb);
3640 if (DECL_INITIAL (fn))
3641 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3643 /* Return statements in the function body will be replaced by jumps
3644 to the RET_LABEL. */
3645 gcc_assert (DECL_INITIAL (fn));
3646 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3648 /* Find the LHS to which the result of this call is assigned. */
3649 return_slot = NULL;
3650 if (gimple_call_lhs (stmt))
3652 modify_dest = gimple_call_lhs (stmt);
3654 /* The function which we are inlining might not return a value,
3655 in which case we should issue a warning that the function
3656 does not return a value. In that case the optimizers will
3657 see that the variable to which the value is assigned was not
3658 initialized. We do not want to issue a warning about that
3659 uninitialized variable. */
3660 if (DECL_P (modify_dest))
3661 TREE_NO_WARNING (modify_dest) = 1;
3663 if (gimple_call_return_slot_opt_p (stmt))
3665 return_slot = modify_dest;
3666 modify_dest = NULL;
3669 else
3670 modify_dest = NULL;
3672 /* If we are inlining a call to the C++ operator new, we don't want
3673 to use type based alias analysis on the return value. Otherwise
3674 we may get confused if the compiler sees that the inlined new
3675 function returns a pointer which was just deleted. See bug
3676 33407. */
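/* A sketch of the PR 33407 hazard (hypothetical C++ code):

     delete p;		// storage handed back to the allocator
     q = new T;		// the inlined operator new may return the
			// very same address

   Basing alias decisions on q's type could wrongly disambiguate the
   new object from the freed *p, so the destination is not reused.  */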
3677 if (DECL_IS_OPERATOR_NEW (fn))
3679 return_slot = NULL;
3680 modify_dest = NULL;
3683 /* Declare the return variable for the function. */
3684 use_retvar = declare_return_variable (id, return_slot, modify_dest);
3686 /* Add local vars in this inlined callee to caller. */
3687 t_step = id->src_cfun->local_decls;
3688 for (; t_step; t_step = TREE_CHAIN (t_step))
3690 var = TREE_VALUE (t_step);
3691 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3693 if (var_ann (var) && add_referenced_var (var))
3694 cfun->local_decls = tree_cons (NULL_TREE, var,
3695 cfun->local_decls);
3697 else if (!can_be_nonlocal (var, id))
3698 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3699 cfun->local_decls);
3702 if (dump_file && (dump_flags & TDF_DETAILS))
3704 fprintf (dump_file, "Inlining ");
3705 print_generic_expr (dump_file, id->src_fn, 0);
3706 fprintf (dump_file, " to ");
3707 print_generic_expr (dump_file, id->dst_fn, 0);
3708 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3711 /* This is it. Duplicate the callee body. Assume callee is
3712 pre-gimplified. Note that we must not alter the caller
3713 function in any way before this point, as this CALL_EXPR may be
3714 a self-referential call; if we're calling ourselves, we need to
3715 duplicate our body before altering anything. */
3716 copy_body (id, bb->count,
3717 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3718 bb, return_block);
3720 /* Reset the escaped and callused solutions. */
3721 if (cfun->gimple_df)
3723 pt_solution_reset (&cfun->gimple_df->escaped);
3724 pt_solution_reset (&cfun->gimple_df->callused);
3727 /* Clean up. */
3728 if (id->debug_map)
3730 pointer_map_destroy (id->debug_map);
3731 id->debug_map = dst;
3733 pointer_map_destroy (id->decl_map);
3734 id->decl_map = st;
3736 /* Unlink the call's virtual operands before replacing it. */
3737 unlink_stmt_vdef (stmt);
3739 /* If the inlined function returns a result that we care about,
3740 substitute the GIMPLE_CALL with an assignment of the return
3741 variable to the LHS of the call. That is, if STMT was
3742 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3743 if (use_retvar && gimple_call_lhs (stmt))
3745 gimple old_stmt = stmt;
3746 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3747 gsi_replace (&stmt_gsi, stmt, false);
3748 if (gimple_in_ssa_p (cfun))
3749 mark_symbols_for_renaming (stmt);
3750 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3752 else
3754 /* Handle the case of inlining a function with no return
3755 statement, which causes the return value to become undefined. */
3756 if (gimple_call_lhs (stmt)
3757 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3759 tree name = gimple_call_lhs (stmt);
3760 tree var = SSA_NAME_VAR (name);
3761 tree def = gimple_default_def (cfun, var);
3763 if (def)
3765 /* If the variable is already used undefined elsewhere, make this
3766 name undefined via a move from that default definition. */
3767 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3768 gsi_replace (&stmt_gsi, stmt, true);
3770 else
3772 /* Otherwise make this variable undefined. */
3773 gsi_remove (&stmt_gsi, true);
3774 set_default_def (var, name);
3775 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3778 else
3779 gsi_remove (&stmt_gsi, true);
3782 if (purge_dead_abnormal_edges)
3783 gimple_purge_dead_abnormal_call_edges (return_block);
3785 /* If the value of the new expression is ignored, that's OK. We
3786 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3787 the equivalent inlined version either. */
3788 if (is_gimple_assign (stmt))
3790 gcc_assert (gimple_assign_single_p (stmt)
3791 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
3792 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3795 /* Output the inlining info for this abstract function, since it has been
3796 inlined. If we don't do this now, we can lose the information about the
3797 variables in the function when the blocks get blown away as soon as we
3798 remove the cgraph node. */
3799 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
3801 /* Update callgraph if needed. */
3802 cgraph_remove_node (cg_edge->callee);
3804 id->block = NULL_TREE;
3805 successfully_inlined = TRUE;
3807 egress:
3808 input_location = saved_location;
3809 return successfully_inlined;
3812 /* Expand call statements reachable from STMT_P.
3813 We can only have CALL_EXPRs as the "toplevel" tree code or nested
3814 in a MODIFY_EXPR.  See tree-gimple.c:get_call_expr_in ().  We
3815 unfortunately cannot use that function here because we need a
3816 pointer to the CALL_EXPR, not the tree itself. */
3818 static bool
3819 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
3821 gimple_stmt_iterator gsi;
3823 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3825 gimple stmt = gsi_stmt (gsi);
3827 if (is_gimple_call (stmt)
3828 && expand_call_inline (bb, stmt, id))
3829 return true;
3832 return false;
3836 /* Walk all basic blocks created after FIRST and try to fold every statement
3837 in the STATEMENTS pointer set. */
3839 static void
3840 fold_marked_statements (int first, struct pointer_set_t *statements)
3842 for (; first < n_basic_blocks; first++)
3843 if (BASIC_BLOCK (first))
3845 gimple_stmt_iterator gsi;
3847 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
3848 !gsi_end_p (gsi);
3849 gsi_next (&gsi))
3850 if (pointer_set_contains (statements, gsi_stmt (gsi)))
3852 gimple old_stmt = gsi_stmt (gsi);
3853 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
3855 if (old_decl && DECL_BUILT_IN (old_decl))
3857 /* Folding builtins can create multiple instructions;
3858 we need to look at all of them. */
3859 gimple_stmt_iterator i2 = gsi;
3860 gsi_prev (&i2);
3861 if (fold_stmt (&gsi))
3863 gimple new_stmt;
3864 if (gsi_end_p (i2))
3865 i2 = gsi_start_bb (BASIC_BLOCK (first));
3866 else
3867 gsi_next (&i2);
3868 while (1)
3870 new_stmt = gsi_stmt (i2);
3871 update_stmt (new_stmt);
3872 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3873 new_stmt);
3875 if (new_stmt == gsi_stmt (gsi))
3877 /* It is okay to check only for the very last
3878 of these statements.  If it is a throwing
3879 statement nothing will change.  If it isn't,
3880 this can remove EH edges.  The only problem
3881 would be if some intermediate statements could
3882 throw while the last one cannot; then we would
3883 have to split the block, which we cannot do
3884 here and we would lose anyway.  And as builtins
3885 probably never throw, this all
3886 is moot anyway. */
3887 if (maybe_clean_or_replace_eh_stmt (old_stmt,
3888 new_stmt))
3889 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3890 break;
3892 gsi_next (&i2);
3896 else if (fold_stmt (&gsi))
3898 /* Re-read the statement from GSI as fold_stmt() may
3899 have changed it. */
3900 gimple new_stmt = gsi_stmt (gsi);
3901 update_stmt (new_stmt);
3903 if (is_gimple_call (old_stmt)
3904 || is_gimple_call (new_stmt))
3905 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3906 new_stmt);
3908 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
3909 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3915 /* Return true if BB has at least one abnormal outgoing edge. */
3917 static inline bool
3918 has_abnormal_outgoing_edge_p (basic_block bb)
3920 edge e;
3921 edge_iterator ei;
3923 FOR_EACH_EDGE (e, ei, bb->succs)
3924 if (e->flags & EDGE_ABNORMAL)
3925 return true;
3927 return false;
3930 /* Expand calls to inline functions in the body of FN. */
3932 unsigned int
3933 optimize_inline_calls (tree fn)
3935 copy_body_data id;
3936 basic_block bb;
3937 int last = n_basic_blocks;
3938 struct gimplify_ctx gctx;
3940 /* There is no point in performing inlining if errors have already
3941 occurred -- and we might crash if we try to inline invalid
3942 code. */
3943 if (errorcount || sorrycount)
3944 return 0;
3946 /* Clear out ID. */
3947 memset (&id, 0, sizeof (id));
3949 id.src_node = id.dst_node = cgraph_node (fn);
3950 id.dst_fn = fn;
3951 /* Or any functions that aren't finished yet. */
3952 if (current_function_decl)
3953 id.dst_fn = current_function_decl;
3955 id.copy_decl = copy_decl_maybe_to_var;
3956 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3957 id.transform_new_cfg = false;
3958 id.transform_return_to_modify = true;
3959 id.transform_lang_insert_block = NULL;
3960 id.statements_to_fold = pointer_set_create ();
3962 push_gimplify_context (&gctx);
3964 /* We make no attempts to keep dominance info up-to-date. */
3965 free_dominance_info (CDI_DOMINATORS);
3966 free_dominance_info (CDI_POST_DOMINATORS);
3968 /* Register specific gimple functions. */
3969 gimple_register_cfg_hooks ();
3971 /* Reach the trees by walking over the CFG, and note the
3972 enclosing basic-blocks in the call edges. */
3973 /* We walk the blocks going forward, because inlined function bodies
3974 will split id->current_basic_block, and the new blocks will
3975 follow it; we'll trudge through them, processing their CALL_EXPRs
3976 along the way. */
3977 FOR_EACH_BB (bb)
3978 gimple_expand_calls_inline (bb, &id);
3980 pop_gimplify_context (NULL);
3982 #ifdef ENABLE_CHECKING
3984 struct cgraph_edge *e;
3986 verify_cgraph_node (id.dst_node);
3988 /* Double check that we inlined everything we are supposed to inline. */
3989 for (e = id.dst_node->callees; e; e = e->next_callee)
3990 gcc_assert (e->inline_failed);
3992 #endif
3994 /* Fold the statements before compacting/renumbering the basic blocks. */
3995 fold_marked_statements (last, id.statements_to_fold);
3996 pointer_set_destroy (id.statements_to_fold);
3998 gcc_assert (!id.debug_stmts);
4000 /* Renumber the (code) basic_blocks consecutively. */
4001 compact_blocks ();
4002 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4003 number_blocks (fn);
4005 fold_cond_expr_cond ();
4006 delete_unreachable_blocks_update_callgraph (&id);
4007 #ifdef ENABLE_CHECKING
4008 verify_cgraph_node (id.dst_node);
4009 #endif
4011 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4012 not possible yet - the IPA passes might make various functions not
4013 throw, and they do not proactively update local EH info.  This is
4014 done later in the fixup_cfg pass, which also executes the verification. */
4015 return (TODO_update_ssa
4016 | TODO_cleanup_cfg
4017 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4018 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4021 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4023 tree
4024 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4026 enum tree_code code = TREE_CODE (*tp);
4027 enum tree_code_class cl = TREE_CODE_CLASS (code);
4029 /* We make copies of most nodes. */
4030 if (IS_EXPR_CODE_CLASS (cl)
4031 || code == TREE_LIST
4032 || code == TREE_VEC
4033 || code == TYPE_DECL
4034 || code == OMP_CLAUSE)
4036 /* Because the chain gets clobbered when we make a copy, we save it
4037 here. */
4038 tree chain = NULL_TREE, new_tree;
4040 chain = TREE_CHAIN (*tp);
4042 /* Copy the node. */
4043 new_tree = copy_node (*tp);
4045 /* Propagate mudflap marked-ness. */
4046 if (flag_mudflap && mf_marked_p (*tp))
4047 mf_mark (new_tree);
4049 *tp = new_tree;
4051 /* Now, restore the chain, if appropriate. That will cause
4052 walk_tree to walk into the chain as well. */
4053 if (code == PARM_DECL
4054 || code == TREE_LIST
4055 || code == OMP_CLAUSE)
4056 TREE_CHAIN (*tp) = chain;
4058 /* For now, we don't update BLOCKs when we make copies. So, we
4059 have to nullify all BIND_EXPRs. */
4060 if (TREE_CODE (*tp) == BIND_EXPR)
4061 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4063 else if (code == CONSTRUCTOR)
4065 /* CONSTRUCTOR nodes need special handling because
4066 we need to duplicate the vector of elements. */
4067 tree new_tree;
4069 new_tree = copy_node (*tp);
4071 /* Propagate mudflap marked-ness. */
4072 if (flag_mudflap && mf_marked_p (*tp))
4073 mf_mark (new_tree);
4075 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4076 CONSTRUCTOR_ELTS (*tp));
4077 *tp = new_tree;
4079 else if (TREE_CODE_CLASS (code) == tcc_type)
4080 *walk_subtrees = 0;
4081 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4082 *walk_subtrees = 0;
4083 else if (TREE_CODE_CLASS (code) == tcc_constant)
4084 *walk_subtrees = 0;
4085 else
4086 gcc_assert (code != STATEMENT_LIST);
4087 return NULL_TREE;
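/* Illustrative sketch (a hypothetical caller, not code from this file):
   copy_tree_r is normally driven by walk_tree to unshare a tree in
   place, e.g.

     walk_tree (&expr, copy_tree_r, NULL, NULL);

   after which EXPR consists of fresh copies of the copied node kinds.  */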
4090 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
4091 information indicating to what new SAVE_EXPR this one should be mapped,
4092 use that one.  Otherwise, create a new node and enter it in ST.  ST is
4093 a pointer map from old SAVE_EXPRs to their copies. */
4095 static void
4096 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4098 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4099 tree *n;
4100 tree t;
4102 /* See if we already encountered this SAVE_EXPR. */
4103 n = (tree *) pointer_map_contains (st, *tp);
4105 /* If we didn't already remap this SAVE_EXPR, do so now. */
4106 if (!n)
4108 t = copy_node (*tp);
4110 /* Remember this SAVE_EXPR. */
4111 *pointer_map_insert (st, *tp) = t;
4112 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4113 *pointer_map_insert (st, t) = t;
4115 else
4117 /* We've already walked into this SAVE_EXPR; don't do it again. */
4118 *walk_subtrees = 0;
4119 t = *n;
4122 /* Replace this SAVE_EXPR with the copy. */
4123 *tp = t;
4126 /* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local label,
4127 copies the label's declaration and enters it in the decl map in DATA
4128 (which is really a `copy_body_data *'). */
4130 static tree
4131 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4132 void *data)
4134 copy_body_data *id = (copy_body_data *) data;
4136 /* Don't walk into types. */
4137 if (TYPE_P (*tp))
4138 *walk_subtrees = 0;
4140 else if (TREE_CODE (*tp) == LABEL_EXPR)
4142 tree decl = TREE_OPERAND (*tp, 0);
4144 /* Copy the decl and remember the copy. */
4145 insert_decl_map (id, decl, id->copy_decl (decl, id));
4148 return NULL_TREE;
4151 /* Perform any modifications to EXPR required when it is unsaved. Does
4152 not recurse into EXPR's subtrees. */
4154 static void
4155 unsave_expr_1 (tree expr)
4157 switch (TREE_CODE (expr))
4159 case TARGET_EXPR:
4160 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4161 It's OK for this to happen if it was part of a subtree that
4162 isn't immediately expanded, such as operand 2 of another
4163 TARGET_EXPR. */
4164 if (TREE_OPERAND (expr, 1))
4165 break;
4167 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4168 TREE_OPERAND (expr, 3) = NULL_TREE;
4169 break;
4171 default:
4172 break;
4176 /* Called via walk_tree when an expression is unsaved.  Using the
4177 decl map pointed to by ST (which is really a `pointer_map'),
4178 remaps all local declarations to appropriate replacements. */
4180 static tree
4181 unsave_r (tree *tp, int *walk_subtrees, void *data)
4183 copy_body_data *id = (copy_body_data *) data;
4184 struct pointer_map_t *st = id->decl_map;
4185 tree *n;
4187 /* Only a local declaration (variable or label). */
4188 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4189 || TREE_CODE (*tp) == LABEL_DECL)
4191 /* Lookup the declaration. */
4192 n = (tree *) pointer_map_contains (st, *tp);
4194 /* If it's there, remap it. */
4195 if (n)
4196 *tp = *n;
4199 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4200 gcc_unreachable ();
4201 else if (TREE_CODE (*tp) == BIND_EXPR)
4202 copy_bind_expr (tp, walk_subtrees, id);
4203 else if (TREE_CODE (*tp) == SAVE_EXPR
4204 || TREE_CODE (*tp) == TARGET_EXPR)
4205 remap_save_expr (tp, st, walk_subtrees);
4206 else
4208 copy_tree_r (tp, walk_subtrees, NULL);
4210 /* Do whatever unsaving is required. */
4211 unsave_expr_1 (*tp);
4214 /* Keep iterating. */
4215 return NULL_TREE;
4218 /* Copies everything in EXPR and replaces variables, labels
4219 and SAVE_EXPRs local to EXPR. */
4221 tree
4222 unsave_expr_now (tree expr)
4224 copy_body_data id;
4226 /* There's nothing to do for NULL_TREE. */
4227 if (expr == 0)
4228 return expr;
4230 /* Set up ID. */
4231 memset (&id, 0, sizeof (id));
4232 id.src_fn = current_function_decl;
4233 id.dst_fn = current_function_decl;
4234 id.decl_map = pointer_map_create ();
4235 id.debug_map = NULL;
4237 id.copy_decl = copy_decl_no_change;
4238 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4239 id.transform_new_cfg = false;
4240 id.transform_return_to_modify = false;
4241 id.transform_lang_insert_block = NULL;
4243 /* Walk the tree once to find local labels. */
4244 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4246 /* Walk the tree again, copying, remapping, and unsaving. */
4247 walk_tree (&expr, unsave_r, &id, NULL);
4249 /* Clean up. */
4250 pointer_map_destroy (id.decl_map);
4251 if (id.debug_map)
4252 pointer_map_destroy (id.debug_map);
4254 return expr;
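/* Illustrative sketch (a hypothetical caller): given a GENERIC tree BODY
   containing SAVE_EXPRs and local labels,

     tree copy = unsave_expr_now (body);

   yields a copy whose SAVE_EXPRs and local declarations have been
   replaced by fresh equivalents, so COPY can be expanded again
   independently of BODY.  */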
4257 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
4258 label, copies the declaration and enters it in the decl map in DATA (which
4259 is really a `copy_body_data *'). */
4261 static tree
4262 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4263 bool *handled_ops_p ATTRIBUTE_UNUSED,
4264 struct walk_stmt_info *wi)
4266 copy_body_data *id = (copy_body_data *) wi->info;
4267 gimple stmt = gsi_stmt (*gsip);
4269 if (gimple_code (stmt) == GIMPLE_LABEL)
4271 tree decl = gimple_label_label (stmt);
4273 /* Copy the decl and remember the copy. */
4274 insert_decl_map (id, decl, id->copy_decl (decl, id));
4277 return NULL_TREE;
4281 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4282 Using the decl map pointed to by ST (which is really a `pointer_map'),
4283 remaps all local declarations to appropriate replacements in gimple
4284 operands. */
4286 static tree
4287 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4289 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4290 copy_body_data *id = (copy_body_data *) wi->info;
4291 struct pointer_map_t *st = id->decl_map;
4292 tree *n;
4293 tree expr = *tp;
4295 /* Only a local declaration (variable or label). */
4296 if ((TREE_CODE (expr) == VAR_DECL
4297 && !TREE_STATIC (expr))
4298 || TREE_CODE (expr) == LABEL_DECL)
4300 /* Lookup the declaration. */
4301 n = (tree *) pointer_map_contains (st, expr);
4303 /* If it's there, remap it. */
4304 if (n)
4305 *tp = *n;
4306 *walk_subtrees = 0;
4308 else if (TREE_CODE (expr) == STATEMENT_LIST
4309 || TREE_CODE (expr) == BIND_EXPR
4310 || TREE_CODE (expr) == SAVE_EXPR)
4311 gcc_unreachable ();
4312 else if (TREE_CODE (expr) == TARGET_EXPR)
4314 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4315 It's OK for this to happen if it was part of a subtree that
4316 isn't immediately expanded, such as operand 2 of another
4317 TARGET_EXPR. */
4318 if (!TREE_OPERAND (expr, 1))
4320 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4321 TREE_OPERAND (expr, 3) = NULL_TREE;
4325 /* Keep iterating. */
4326 return NULL_TREE;
4330 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4331 Using the decl map held in the `copy_body_data' pointed to by WI->info,
4332 remaps all local declarations to appropriate replacements in gimple
4333 statements. */
4335 static tree
4336 replace_locals_stmt (gimple_stmt_iterator *gsip,
4337 bool *handled_ops_p ATTRIBUTE_UNUSED,
4338 struct walk_stmt_info *wi)
4340 copy_body_data *id = (copy_body_data *) wi->info;
4341 gimple stmt = gsi_stmt (*gsip);
4343 if (gimple_code (stmt) == GIMPLE_BIND)
4345 tree block = gimple_bind_block (stmt);
4347 if (block)
4349 remap_block (&block, id);
4350 gimple_bind_set_block (stmt, block);
4353 /* This will remap a lot of the same decls again, but this should be
4354 harmless. */
4355 if (gimple_bind_vars (stmt))
4356 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4359 /* Keep iterating. */
4360 return NULL_TREE;
4364 /* Copies everything in SEQ and replaces variables and labels local to
4365 current_function_decl. */
4367 gimple_seq
4368 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4370 copy_body_data id;
4371 struct walk_stmt_info wi;
4372 struct pointer_set_t *visited;
4373 gimple_seq copy;
4375 /* There's nothing to do for an empty sequence. */
4376 if (seq == NULL)
4377 return seq;
4379 /* Set up ID. */
4380 memset (&id, 0, sizeof (id));
4381 id.src_fn = current_function_decl;
4382 id.dst_fn = current_function_decl;
4383 id.decl_map = pointer_map_create ();
4384 id.debug_map = NULL;
4386 id.copy_decl = copy_decl_no_change;
4387 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4388 id.transform_new_cfg = false;
4389 id.transform_return_to_modify = false;
4390 id.transform_lang_insert_block = NULL;
4392 /* Walk the tree once to find local labels. */
4393 memset (&wi, 0, sizeof (wi));
4394 visited = pointer_set_create ();
4395 wi.info = &id;
4396 wi.pset = visited;
4397 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4398 pointer_set_destroy (visited);
4400 copy = gimple_seq_copy (seq);
4402 /* Walk the copy, remapping decls. */
4403 memset (&wi, 0, sizeof (wi));
4404 wi.info = &id;
4405 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4407 /* Clean up. */
4408 pointer_map_destroy (id.decl_map);
4409 if (id.debug_map)
4410 pointer_map_destroy (id.debug_map);
4412 return copy;
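/* Illustrative sketch (a hypothetical caller): this is the GIMPLE
   counterpart of unsave_expr_now; for instance, to duplicate the body
   of a GIMPLE_BIND without sharing its locals:

     gimple_seq body = gimple_bind_body (bind_stmt);
     gimple_seq dup = copy_gimple_seq_and_replace_locals (body);

   DUP holds fresh statements with block-local variables and labels
   remapped to new declarations.  */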
4416 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4418 static tree
4419 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4421 if (*tp == data)
4422 return (tree) data;
4423 else
4424 return NULL;
4427 bool
4428 debug_find_tree (tree top, tree search)
4430 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
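/* For example, from a gdb session (illustrative; SUSPECT is any tree
   node at hand in the debugger):

     (gdb) call debug_find_tree (current_function_decl, suspect)

   returns true if SUSPECT is reachable from the function decl.  */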
4434 /* Declare the variables created by the inliner.  Add all the variables
4435 in VARS to BLOCK. */
4437 static void
4438 declare_inline_vars (tree block, tree vars)
4440 tree t;
4441 for (t = vars; t; t = TREE_CHAIN (t))
4443 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4444 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4445 cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
4448 if (block)
4449 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4452 /* Finish copying DECL into COPY.  The DECL originally was in ID->src_fn,
4453 but the copy will be placed in ID->dst_fn.  Shared by the decl-copying
4454 helpers below. */
4456 static tree
4457 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4459 /* Don't generate debug information for the copy if we wouldn't have
4460 generated it for the original either. */
4461 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4462 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4464 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4465 declaration inspired this copy. */
4466 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4468 /* The new variable/label has no RTL, yet. */
4469 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4470 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4471 SET_DECL_RTL (copy, NULL_RTX);
4473 /* These args would always appear unused, if not for this. */
4474 TREE_USED (copy) = 1;
4476 /* Set the context for the new declaration. */
4477 if (!DECL_CONTEXT (decl))
4478 /* Globals stay global. */
4480 else if (DECL_CONTEXT (decl) != id->src_fn)
4481 /* Things that weren't in the scope of the function we're inlining
4482 from aren't in the scope we're inlining to, either. */
4484 else if (TREE_STATIC (decl))
4485 /* Function-scoped static variables should stay in the original
4486 function. */
4488 else
4489 /* Ordinary automatic local variables are now in the scope of the
4490 new function. */
4491 DECL_CONTEXT (copy) = id->dst_fn;
4493 return copy;
4496 static tree
4497 copy_decl_to_var (tree decl, copy_body_data *id)
4499 tree copy, type;
4501 gcc_assert (TREE_CODE (decl) == PARM_DECL
4502 || TREE_CODE (decl) == RESULT_DECL);
4504 type = TREE_TYPE (decl);
4506 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4507 VAR_DECL, DECL_NAME (decl), type);
4508 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4509 TREE_READONLY (copy) = TREE_READONLY (decl);
4510 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4511 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4513 return copy_decl_for_dup_finish (id, decl, copy);
4516 /* Like copy_decl_to_var, but create a return slot object instead of a
4517 pointer variable for return by invisible reference. */
4519 static tree
4520 copy_result_decl_to_var (tree decl, copy_body_data *id)
4522 tree copy, type;
4524 gcc_assert (TREE_CODE (decl) == PARM_DECL
4525 || TREE_CODE (decl) == RESULT_DECL);
4527 type = TREE_TYPE (decl);
4528 if (DECL_BY_REFERENCE (decl))
4529 type = TREE_TYPE (type);
4531 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4532 VAR_DECL, DECL_NAME (decl), type);
4533 TREE_READONLY (copy) = TREE_READONLY (decl);
4534 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4535 if (!DECL_BY_REFERENCE (decl))
4537 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4538 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4541 return copy_decl_for_dup_finish (id, decl, copy);
4544 tree
4545 copy_decl_no_change (tree decl, copy_body_data *id)
4547 tree copy;
4549 copy = copy_node (decl);
4551 /* The COPY is not abstract; it will be generated in DST_FN. */
4552 DECL_ABSTRACT (copy) = 0;
4553 lang_hooks.dup_lang_specific_decl (copy);
4555 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4556 been taken; it's for internal bookkeeping in expand_goto_internal. */
4557 if (TREE_CODE (copy) == LABEL_DECL)
4559 TREE_ADDRESSABLE (copy) = 0;
4560 LABEL_DECL_UID (copy) = -1;
4563 return copy_decl_for_dup_finish (id, decl, copy);
4566 static tree
4567 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4569 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4570 return copy_decl_to_var (decl, id);
4571 else
4572 return copy_decl_no_change (decl, id);
4575 /* Return a copy of the function's argument tree. */
4576 static tree
4577 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4578 bitmap args_to_skip, tree *vars)
4580 tree arg, *parg;
4581 tree new_parm = NULL;
4582 int i = 0;
4584 parg = &new_parm;
4586 for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4587 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4589 tree new_tree = remap_decl (arg, id);
4590 lang_hooks.dup_lang_specific_decl (new_tree);
4591 *parg = new_tree;
4592 parg = &TREE_CHAIN (new_tree);
4594 else if (!pointer_map_contains (id->decl_map, arg))
4596 /* Make an equivalent VAR_DECL.  If the argument was used
4597 as a temporary variable later in the function, its uses will be
4598 replaced by this local variable. */
4599 tree var = copy_decl_to_var (arg, id);
4600 get_var_ann (var);
4601 add_referenced_var (var);
4602 insert_decl_map (id, arg, var);
4603 /* Declare this new variable. */
4604 TREE_CHAIN (var) = *vars;
4605 *vars = var;
4607 return new_parm;
4610 /* Return a copy of the function's static chain. */
4611 static tree
4612 copy_static_chain (tree static_chain, copy_body_data * id)
4614 tree *chain_copy, *pvar;
4616 chain_copy = &static_chain;
4617 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4619 tree new_tree = remap_decl (*pvar, id);
4620 lang_hooks.dup_lang_specific_decl (new_tree);
4621 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4622 *pvar = new_tree;
4624 return static_chain;
4627 /* Return true if the function is allowed to be versioned.
4628 This is a guard for the versioning functionality. */
4630 bool
4631 tree_versionable_function_p (tree fndecl)
4633 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4634 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
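/* For example, a function declared (illustrative, not from this file)

     void f (int) __attribute__ ((noclone));

   is rejected here, so the IPA machinery will not create specialized
   versions of it.  */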
4637 /* Delete all unreachable basic blocks and update callgraph.
4638 Doing so is somewhat nontrivial because we need to update all clones and
4639 remove inline functions that become unreachable. */
4641 static bool
4642 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4644 bool changed = false;
4645 basic_block b, next_bb;
4647 find_unreachable_blocks ();
4649 /* Delete all unreachable basic blocks. */
4651 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4653 next_bb = b->next_bb;
4655 if (!(b->flags & BB_REACHABLE))
4657 gimple_stmt_iterator bsi;
4659 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4660 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4662 struct cgraph_edge *e;
4663 struct cgraph_node *node;
4665 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4667 if (!e->inline_failed)
4668 cgraph_remove_node_and_inline_clones (e->callee);
4669 else
4670 cgraph_remove_edge (e);
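/* Walk the clone tree of DST_NODE in pre-order: descend into
   clones first, then visit siblings, climbing back through
   clone_of when a subtree is exhausted.  */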
4672 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4673 && id->dst_node->clones)
4674 for (node = id->dst_node->clones; node != id->dst_node;)
4676 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4678 if (!e->inline_failed)
4679 cgraph_remove_node_and_inline_clones (e->callee);
4680 else
4681 cgraph_remove_edge (e);
4684 if (node->clones)
4685 node = node->clones;
4686 else if (node->next_sibling_clone)
4687 node = node->next_sibling_clone;
4688 else
4690 while (node != id->dst_node && !node->next_sibling_clone)
4691 node = node->clone_of;
4692 if (node != id->dst_node)
4693 node = node->next_sibling_clone;
4697 delete_basic_block (b);
4698 changed = true;
4702 if (changed)
4703 tidy_fallthru_edges ();
4704 return changed;
4707 /* Update clone info after duplication. */
4709 static void
4710 update_clone_info (copy_body_data * id)
4712 struct cgraph_node *node;
4713 if (!id->dst_node->clones)
4714 return;
4715 for (node = id->dst_node->clones; node != id->dst_node;)
4717 /* First update replace maps to match the new body. */
4718 if (node->clone.tree_map)
4720 unsigned int i;
4721 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4723 struct ipa_replace_map *replace_info;
4724 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4725 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4726 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4729 if (node->clones)
4730 node = node->clones;
4731 else if (node->next_sibling_clone)
4732 node = node->next_sibling_clone;
4733 else
4735 while (node != id->dst_node && !node->next_sibling_clone)
4736 node = node->clone_of;
4737 if (node != id->dst_node)
4738 node = node->next_sibling_clone;
4743 /* Create a copy of a function's tree.
4744 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4745 of the original function and the new copied function
4746 respectively. In case we want to replace a DECL
4747 tree with another tree while duplicating the function's
4748 body, TREE_MAP represents the mapping between these
4749 trees. If UPDATE_CLONES is set, the call_stmt fields
4750 of edges of clones of the function will be updated. */
4751 void
4752 tree_function_versioning (tree old_decl, tree new_decl,
4753 VEC(ipa_replace_map_p,gc)* tree_map,
4754 bool update_clones, bitmap args_to_skip)
4756 struct cgraph_node *old_version_node;
4757 struct cgraph_node *new_version_node;
4758 copy_body_data id;
4759 tree p;
4760 unsigned i;
4761 struct ipa_replace_map *replace_info;
4762 basic_block old_entry_block, bb;
4763 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4765 tree t_step;
4766 tree old_current_function_decl = current_function_decl;
4767 tree vars = NULL_TREE;
4769 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4770 && TREE_CODE (new_decl) == FUNCTION_DECL);
4771 DECL_POSSIBLY_INLINED (old_decl) = 1;
4773 old_version_node = cgraph_node (old_decl);
4774 new_version_node = cgraph_node (new_decl);
4776 /* Output the inlining info for this abstract function, since it has been
4777 inlined. If we don't do this now, we can lose the information about the
4778 variables in the function when the blocks get blown away as soon as we
4779 remove the cgraph node. */
4780 (*debug_hooks->outlining_inline_function) (old_decl);
4782 DECL_ARTIFICIAL (new_decl) = 1;
4783 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
4784 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
4786 /* Prepare the data structures for the tree copy. */
4787 memset (&id, 0, sizeof (id));
4789 /* Generate a new name for the new version. */
4790 id.statements_to_fold = pointer_set_create ();
4792 id.decl_map = pointer_map_create ();
4793 id.debug_map = NULL;
4794 id.src_fn = old_decl;
4795 id.dst_fn = new_decl;
4796 id.src_node = old_version_node;
4797 id.dst_node = new_version_node;
4798 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
4799 if (id.src_node->ipa_transforms_to_apply)
4801 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
4802 unsigned int i;
4804 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
4805 id.src_node->ipa_transforms_to_apply);
4806 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
4807 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
4808 VEC_index (ipa_opt_pass,
4809 old_transforms_to_apply,
4810 i));
4813 id.copy_decl = copy_decl_no_change;
4814 id.transform_call_graph_edges
4815 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
4816 id.transform_new_cfg = true;
4817 id.transform_return_to_modify = false;
4818 id.transform_lang_insert_block = NULL;
4820 current_function_decl = new_decl;
4821 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
4822 (DECL_STRUCT_FUNCTION (old_decl));
4823 initialize_cfun (new_decl, old_decl,
4824 old_entry_block->count);
4825 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
4827 /* Copy the function's static chain. */
4828 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
4829 if (p)
4830 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
4831 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
4832 &id);
4834 /* If there's a tree_map, prepare for substitution. */
4835 if (tree_map)
4836 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
4838 gimple init;
4839 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
4840 if (replace_info->replace_p)
4842 tree op = replace_info->new_tree;
4844 STRIP_NOPS (op);
4846 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
4847 op = TREE_OPERAND (op, 0);
4849 if (TREE_CODE (op) == ADDR_EXPR)
4851 op = TREE_OPERAND (op, 0);
4852 while (handled_component_p (op))
4853 op = TREE_OPERAND (op, 0);
4854 if (TREE_CODE (op) == VAR_DECL)
4855 add_referenced_var (op);
4857 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
4858 init = setup_one_parameter (&id, replace_info->old_tree,
4859 replace_info->new_tree, id.src_fn,
4860 NULL,
4861 &vars);
4862 if (init)
4863 VEC_safe_push (gimple, heap, init_stmts, init);
4866 /* Copy the function's arguments. */
4867 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
4868 DECL_ARGUMENTS (new_decl) =
4869 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
4870 args_to_skip, &vars);
4872 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
4874 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4875 number_blocks (id.dst_fn);
4877 declare_inline_vars (DECL_INITIAL (new_decl), vars);
4879 if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
4880 /* Add local vars. */
4881 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
4882 t_step; t_step = TREE_CHAIN (t_step))
4884 tree var = TREE_VALUE (t_step);
4885 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
4886 cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
4887 else if (!can_be_nonlocal (var, &id))
4888 cfun->local_decls =
4889 tree_cons (NULL_TREE, remap_decl (var, &id),
4890 cfun->local_decls);
4893 /* Copy the function's body. */
4894 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
4895 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
4897 if (DECL_RESULT (old_decl) != NULL_TREE)
4899 tree *res_decl = &DECL_RESULT (old_decl);
4900 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
4901 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
4904 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4905 number_blocks (new_decl);
4907 /* We want to create the BB unconditionally, so that the addition of
4908 debug stmts doesn't affect BB count, which may in the end cause
4909 codegen differences. */
4910 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
4911 while (VEC_length (gimple, init_stmts))
4912 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
4913 update_clone_info (&id);
4915 /* Remap the nonlocal_goto_save_area, if any. */
4916 if (cfun->nonlocal_goto_save_area)
4918 struct walk_stmt_info wi;
4920 memset (&wi, 0, sizeof (wi));
4921 wi.info = &id;
4922 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
4925 /* Clean up. */
4926 pointer_map_destroy (id.decl_map);
4927 if (id.debug_map)
4928 pointer_map_destroy (id.debug_map);
4929 free_dominance_info (CDI_DOMINATORS);
4930 free_dominance_info (CDI_POST_DOMINATORS);
4932 fold_marked_statements (0, id.statements_to_fold);
4933 pointer_set_destroy (id.statements_to_fold);
4934 fold_cond_expr_cond ();
4935 delete_unreachable_blocks_update_callgraph (&id);
4936 update_ssa (TODO_update_ssa);
4937 free_dominance_info (CDI_DOMINATORS);
4938 free_dominance_info (CDI_POST_DOMINATORS);
4940 gcc_assert (!id.debug_stmts);
4941 VEC_free (gimple, heap, init_stmts);
4942 pop_cfun ();
4943 current_function_decl = old_current_function_decl;
4944 gcc_assert (!current_function_decl
4945 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
4946 return;
4949 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
4950 the callee and return the inlined body on success. */
4952 tree
4953 maybe_inline_call_in_expr (tree exp)
4955 tree fn = get_callee_fndecl (exp);
4957 /* We can only try to inline "const" functions. */
4958 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
4960 struct pointer_map_t *decl_map = pointer_map_create ();
4961 call_expr_arg_iterator iter;
4962 copy_body_data id;
4963 tree param, arg, t;
4965 /* Remap the parameters. */
4966 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
4967 param;
4968 param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
4969 *pointer_map_insert (decl_map, param) = arg;
4971 memset (&id, 0, sizeof (id));
4972 id.src_fn = fn;
4973 id.dst_fn = current_function_decl;
4974 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
4975 id.decl_map = decl_map;
4977 id.copy_decl = copy_decl_no_change;
4978 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4979 id.transform_new_cfg = false;
4980 id.transform_return_to_modify = true;
4981 id.transform_lang_insert_block = false;
4983 /* Make sure not to unshare trees behind the front-end's back
4984 since front-end specific mechanisms may rely on sharing. */
4985 id.regimplify = false;
4986 id.do_not_unshare = true;
4988 /* We're not inside any EH region. */
4989 id.eh_lp_nr = 0;
4991 t = copy_tree_body (&id);
4992 pointer_map_destroy (decl_map);
4994 /* We can only return something suitable for use in a GENERIC
4995 expression tree. */
4996 if (TREE_CODE (t) == MODIFY_EXPR)
4997 return TREE_OPERAND (t, 1);
5000 return NULL_TREE;
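/* Illustrative sketch (hypothetical use): for a callee such as

     int square (int x) __attribute__ ((const));

   whose GENERIC body is saved, a front end may try

     tree folded = maybe_inline_call_in_expr (call);

   and substitute FOLDED for the CALL_EXPR when it is non-NULL.  */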
5003 /* Duplicate a type, fields and all. */
5005 tree
5006 build_duplicate_type (tree type)
5008 struct copy_body_data id;
5010 memset (&id, 0, sizeof (id));
5011 id.src_fn = current_function_decl;
5012 id.dst_fn = current_function_decl;
5013 id.src_cfun = cfun;
5014 id.decl_map = pointer_map_create ();
5015 id.debug_map = NULL;
5016 id.copy_decl = copy_decl_no_change;
5018 type = remap_type_1 (type, &id);
5020 pointer_map_destroy (id.decl_map);
5021 if (id.debug_map)
5022 pointer_map_destroy (id.debug_map);
5024 TYPE_CANONICAL (type) = type;
5026 return type;
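/* Illustrative sketch (hypothetical use):

     tree dup = build_duplicate_type (orig_type);

   DUP is structurally identical to ORIG_TYPE but is its own canonical
   type, so the middle end treats the two as distinct.  */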
5029 /* Return whether it is safe to inline a function: not if the callee uses
5030 different target specific options or EH personality, or if the call site's
5031 actual argument types mismatch the parameter types.  E is the call edge. */
5032 bool
5033 tree_can_inline_p (struct cgraph_edge *e)
5035 #if 0
5036 /* This causes a regression in SPEC in that it prevents a cold function from
5037 inlining a hot function. Perhaps this should only apply to functions
5038 that the user declares hot/cold/optimize explicitly. */
5040 /* Don't inline a function with a higher optimization level than the
5041 caller, or with different space constraints (hot/cold functions). */
5042 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
5043 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
5045 if (caller_tree != callee_tree)
5047 struct cl_optimization *caller_opt
5048 = TREE_OPTIMIZATION ((caller_tree)
5049 ? caller_tree
5050 : optimization_default_node);
5052 struct cl_optimization *callee_opt
5053 = TREE_OPTIMIZATION ((callee_tree)
5054 ? callee_tree
5055 : optimization_default_node);
5057 if ((caller_opt->optimize > callee_opt->optimize)
5058 || (caller_opt->optimize_size != callee_opt->optimize_size))
5059 return false;
5061 #endif
5062 tree caller, callee;
5064 caller = e->caller->decl;
5065 callee = e->callee->decl;
5067 /* We cannot inline a function that uses a different EH personality
5068 than the caller. */
5069 if (DECL_FUNCTION_PERSONALITY (caller)
5070 && DECL_FUNCTION_PERSONALITY (callee)
5071 && (DECL_FUNCTION_PERSONALITY (caller)
5072 != DECL_FUNCTION_PERSONALITY (callee)))
5074 e->inline_failed = CIF_UNSPECIFIED;
5075 gimple_call_set_cannot_inline (e->call_stmt, true);
5076 return false;
5079 /* Allow the backend to decide if inlining is ok. */
5080 if (!targetm.target_option.can_inline_p (caller, callee))
5082 e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5083 gimple_call_set_cannot_inline (e->call_stmt, true);
5084 e->call_stmt_cannot_inline_p = true;
5085 return false;
5088 if (e->call_stmt
5089 && !gimple_check_call_args (e->call_stmt))
5091 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5092 gimple_call_set_cannot_inline (e->call_stmt, true);
5093 e->call_stmt_cannot_inline_p = true;
5094 return false;
5097 return true;