1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "hashtab.h"
36 #include "langhooks.h"
37 #include "basic-block.h"
38 #include "tree-iterator.h"
39 #include "cgraph.h"
40 #include "intl.h"
41 #include "tree-mudflap.h"
42 #include "tree-flow.h"
43 #include "function.h"
44 #include "ggc.h"
46 #include "diagnostic.h"
47 #include "except.h"
48 #include "debug.h"
49 #include "pointer-set.h"
50 #include "ipa-prop.h"
51 #include "value-prof.h"
52 #include "tree-pass.h"
53 #include "target.h"
54 #include "integrate.h"
56 /* I'm not real happy about this, but we need to handle gimple and
57 non-gimple trees. */
58 #include "gimple.h"
60 /* Inlining, Cloning, Versioning, Parallelization
62 Inlining: a function body is duplicated, but the PARM_DECLs are
63 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
64 MODIFY_EXPRs that store to a dedicated returned-value variable.
65 The duplicated eh_region info of the copy will later be appended
66 to the info for the caller; the eh_region info in copied throwing
67 statements and RESX statements are adjusted accordingly.
69 Cloning: (only in C++) We have one body for a con/de/structor, and
70 multiple function decls, each with a unique parameter list.
71 Duplicate the body, using the given splay tree; some parameters
72 will become constants (like 0 or 1).
 74 Versioning: a function body is duplicated and the result is a new
 75 function, rather than being inserted into the blocks of an existing
 76 function as with inlining. Some parameters will become constants.
78 Parallelization: a region of a function is duplicated resulting in
79 a new function. Variables may be replaced with complex expressions
80 to enable shared variable semantics.
 82 All of these will simultaneously look up any callgraph edges. If
83 we're going to inline the duplicated function body, and the given
84 function has some cloned callgraph nodes (one for each place this
85 function will be inlined) those callgraph edges will be duplicated.
86 If we're cloning the body, those callgraph edges will be
87 updated to point into the new body. (Note that the original
88 callgraph node and edge list will not be altered.)
90 See the CALL_EXPR handling case in copy_tree_body_r (). */
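/* As an illustration of the inlining case (a sketch, not code from
   this file): inlining

     int inc (int x) { return x + 1; }

   into a call "r = inc (a);" conceptually yields

     int x.1;              x.1 is the PARM_DECL remapped to a VAR_DECL
     x.1 = a;
     retval.2 = x.1 + 1;   the RETURN_EXPR became a MODIFY_EXPR
     r = retval.2;

   where retval.2 stands for the dedicated returned-value variable;
   the temporary names are invented for the example.  */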
92 /* To Do:
94 o In order to make inlining-on-trees work, we pessimized
95 function-local static constants. In particular, they are now
96 always output, even when not addressed. Fix this by treating
97 function-local static constants just like global static
98 constants; the back-end already knows not to output them if they
99 are not needed.
101 o Provide heuristics to clamp inlining of recursive template
102 calls? */
105 /* Weights that estimate_num_insns uses for heuristics in inlining. */
107 eni_weights eni_inlining_weights;
109 /* Weights that estimate_num_insns uses to estimate the size of the
110 produced code. */
112 eni_weights eni_size_weights;
114 /* Weights that estimate_num_insns uses to estimate the time necessary
115 to execute the produced code. */
117 eni_weights eni_time_weights;
119 /* Prototypes. */
121 static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
122 static void remap_block (tree *, copy_body_data *);
123 static void copy_bind_expr (tree *, int *, copy_body_data *);
124 static tree mark_local_for_remap_r (tree *, int *, void *);
125 static void unsave_expr_1 (tree);
126 static tree unsave_r (tree *, int *, void *);
127 static void declare_inline_vars (tree, tree);
128 static void remap_save_expr (tree *, void *, int *);
129 static void prepend_lexical_block (tree current_block, tree new_block);
130 static tree copy_decl_to_var (tree, copy_body_data *);
131 static tree copy_result_decl_to_var (tree, copy_body_data *);
132 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
133 static gimple remap_gimple_stmt (gimple, copy_body_data *);
134 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
 136 /* Insert a tree->tree mapping for ID. Although the name suggests
 137 that the trees should be variables, it is used for more than that. */
139 void
140 insert_decl_map (copy_body_data *id, tree key, tree value)
142 *pointer_map_insert (id->decl_map, key) = value;
144 /* Always insert an identity map as well. If we see this same new
145 node again, we won't want to duplicate it a second time. */
146 if (key != value)
147 *pointer_map_insert (id->decl_map, value) = value;
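/* For instance (a hypothetical use, names invented for illustration):

     insert_decl_map (id, old_parm, new_var);

   records both OLD_PARM -> NEW_VAR and the identity NEW_VAR -> NEW_VAR,
   so that a later remap_decl (new_var, id) finds the existing mapping
   instead of copying NEW_VAR a second time.  */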
150 /* Insert a tree->tree mapping for ID. This is only used for
151 variables. */
153 static void
154 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
156 if (!gimple_in_ssa_p (id->src_cfun))
157 return;
159 if (!MAY_HAVE_DEBUG_STMTS)
160 return;
162 if (!target_for_debug_bind (key))
163 return;
165 gcc_assert (TREE_CODE (key) == PARM_DECL);
166 gcc_assert (TREE_CODE (value) == VAR_DECL);
168 if (!id->debug_map)
169 id->debug_map = pointer_map_create ();
171 *pointer_map_insert (id->debug_map, key) = value;
174 /* Construct new SSA name for old NAME. ID is the inline context. */
176 static tree
177 remap_ssa_name (tree name, copy_body_data *id)
179 tree new_tree;
180 tree *n;
182 gcc_assert (TREE_CODE (name) == SSA_NAME);
184 n = (tree *) pointer_map_contains (id->decl_map, name);
185 if (n)
186 return unshare_expr (*n);
 188 /* Do not set DEF_STMT yet, as the statement is not copied yet. We do that
 189 in copy_bb. */
190 new_tree = remap_decl (SSA_NAME_VAR (name), id);
 192 /* We might've substituted a constant or another SSA_NAME for
 193 the variable.
 195 Replace the SSA name representing RESULT_DECL by the variable during
 196 inlining: this saves us from the need to introduce a PHI node when
 197 the return value is only partly initialized. */
198 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
199 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
200 || !id->transform_return_to_modify))
202 new_tree = make_ssa_name (new_tree, NULL);
203 insert_decl_map (id, name, new_tree);
204 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
205 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
206 TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
207 if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
 209 /* By inlining a function having an uninitialized variable, we might
 210 extend its lifetime (the variable might get reused). This causes
 211 an ICE if we end up extending the lifetime of an SSA name across an
 212 abnormal edge, and it also increases register pressure.
 214 We simply initialize all uninitialized vars with 0, except
 215 when we are inlining into the very first BB. We could avoid
 216 this for all BBs that are not inside strongly connected
 217 regions of the CFG, but that is expensive to test. */
218 if (id->entry_bb
219 && is_gimple_reg (SSA_NAME_VAR (name))
220 && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
221 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
222 || EDGE_COUNT (id->entry_bb->preds) != 1))
224 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
225 gimple init_stmt;
227 init_stmt = gimple_build_assign (new_tree,
228 fold_convert (TREE_TYPE (new_tree),
229 integer_zero_node));
230 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
231 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
233 else
235 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
236 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
237 == name)
238 set_default_def (SSA_NAME_VAR (new_tree), new_tree);
242 else
243 insert_decl_map (id, name, new_tree);
244 return new_tree;
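/* For example (a sketch): inlining a callee that returns an
   uninitialized local,

     int x;
     return x;

   makes the code above insert an explicit "x_N = 0" assignment into
   id->entry_bb (unless we are inlining into the very first BB), so
   that the SSA name's lifetime is not extended across an abnormal
   edge.  */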
247 /* If nonzero, we're remapping the contents of inlined debug
248 statements. If negative, an error has occurred, such as a
249 reference to a variable that isn't available in the inlined
250 context. */
251 int processing_debug_stmt = 0;
253 /* Remap DECL during the copying of the BLOCK tree for the function. */
255 tree
256 remap_decl (tree decl, copy_body_data *id)
258 tree *n;
259 tree fn;
261 /* We only remap local variables in the current function. */
262 fn = id->src_fn;
264 /* See if we have remapped this declaration. */
266 n = (tree *) pointer_map_contains (id->decl_map, decl);
268 if (!n && processing_debug_stmt)
270 processing_debug_stmt = -1;
271 return decl;
274 /* If we didn't already have an equivalent for this declaration,
275 create one now. */
276 if (!n)
278 /* Make a copy of the variable or label. */
279 tree t = id->copy_decl (decl, id);
281 /* Remember it, so that if we encounter this local entity again
282 we can reuse this copy. Do this early because remap_type may
283 need this decl for TYPE_STUB_DECL. */
284 insert_decl_map (id, decl, t);
286 if (!DECL_P (t))
287 return t;
289 /* Remap types, if necessary. */
290 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
291 if (TREE_CODE (t) == TYPE_DECL)
292 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
294 /* Remap sizes as necessary. */
295 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
296 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
298 /* If fields, do likewise for offset and qualifier. */
299 if (TREE_CODE (t) == FIELD_DECL)
301 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
302 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
303 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
306 if (cfun && gimple_in_ssa_p (cfun)
307 && (TREE_CODE (t) == VAR_DECL
308 || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
310 tree def = gimple_default_def (id->src_cfun, decl);
311 get_var_ann (t);
312 if (TREE_CODE (decl) != PARM_DECL && def)
314 tree map = remap_ssa_name (def, id);
 315 /* Watch out for RESULT_DECLs whose SSA names map directly
 316 to them. */
317 if (TREE_CODE (map) == SSA_NAME
318 && gimple_nop_p (SSA_NAME_DEF_STMT (map)))
319 set_default_def (t, map);
321 add_referenced_var (t);
323 return t;
326 if (id->do_not_unshare)
327 return *n;
328 else
329 return unshare_expr (*n);
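/* For example (a sketch): the first remap_decl of a local "int i"
   calls id->copy_decl to make a fresh VAR_DECL, records the mapping
   before anything else, and then remaps the copy's type and size
   trees; any later remap_decl of the same decl just returns the
   recorded copy.  */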
332 static tree
333 remap_type_1 (tree type, copy_body_data *id)
335 tree new_tree, t;
 337 /* We do need a copy. Build and register it now. If this is a pointer or
338 reference type, remap the designated type and make a new pointer or
339 reference type. */
340 if (TREE_CODE (type) == POINTER_TYPE)
342 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
343 TYPE_MODE (type),
344 TYPE_REF_CAN_ALIAS_ALL (type));
345 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
346 new_tree = build_type_attribute_qual_variant (new_tree,
347 TYPE_ATTRIBUTES (type),
348 TYPE_QUALS (type));
349 insert_decl_map (id, type, new_tree);
350 return new_tree;
352 else if (TREE_CODE (type) == REFERENCE_TYPE)
354 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
355 TYPE_MODE (type),
356 TYPE_REF_CAN_ALIAS_ALL (type));
357 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
358 new_tree = build_type_attribute_qual_variant (new_tree,
359 TYPE_ATTRIBUTES (type),
360 TYPE_QUALS (type));
361 insert_decl_map (id, type, new_tree);
362 return new_tree;
364 else
365 new_tree = copy_node (type);
367 insert_decl_map (id, type, new_tree);
369 /* This is a new type, not a copy of an old type. Need to reassociate
370 variants. We can handle everything except the main variant lazily. */
371 t = TYPE_MAIN_VARIANT (type);
372 if (type != t)
374 t = remap_type (t, id);
375 TYPE_MAIN_VARIANT (new_tree) = t;
376 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
377 TYPE_NEXT_VARIANT (t) = new_tree;
379 else
381 TYPE_MAIN_VARIANT (new_tree) = new_tree;
382 TYPE_NEXT_VARIANT (new_tree) = NULL;
385 if (TYPE_STUB_DECL (type))
386 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
388 /* Lazily create pointer and reference types. */
389 TYPE_POINTER_TO (new_tree) = NULL;
390 TYPE_REFERENCE_TO (new_tree) = NULL;
392 switch (TREE_CODE (new_tree))
394 case INTEGER_TYPE:
395 case REAL_TYPE:
396 case FIXED_POINT_TYPE:
397 case ENUMERAL_TYPE:
398 case BOOLEAN_TYPE:
399 t = TYPE_MIN_VALUE (new_tree);
400 if (t && TREE_CODE (t) != INTEGER_CST)
401 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
403 t = TYPE_MAX_VALUE (new_tree);
404 if (t && TREE_CODE (t) != INTEGER_CST)
405 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
406 return new_tree;
408 case FUNCTION_TYPE:
409 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
410 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
411 return new_tree;
413 case ARRAY_TYPE:
414 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
415 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
416 break;
418 case RECORD_TYPE:
419 case UNION_TYPE:
420 case QUAL_UNION_TYPE:
422 tree f, nf = NULL;
424 for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
426 t = remap_decl (f, id);
427 DECL_CONTEXT (t) = new_tree;
428 TREE_CHAIN (t) = nf;
429 nf = t;
431 TYPE_FIELDS (new_tree) = nreverse (nf);
433 break;
435 case OFFSET_TYPE:
436 default:
437 /* Shouldn't have been thought variable sized. */
438 gcc_unreachable ();
441 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
442 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
444 return new_tree;
447 tree
448 remap_type (tree type, copy_body_data *id)
450 tree *node;
451 tree tmp;
453 if (type == NULL)
454 return type;
456 /* See if we have remapped this type. */
457 node = (tree *) pointer_map_contains (id->decl_map, type);
458 if (node)
459 return *node;
461 /* The type only needs remapping if it's variably modified. */
462 if (! variably_modified_type_p (type, id->src_fn))
464 insert_decl_map (id, type, type);
465 return type;
468 id->remapping_type_depth++;
469 tmp = remap_type_1 (type, id);
470 id->remapping_type_depth--;
472 return tmp;
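/* For example (a sketch): in

     void f (int n) { int a[n]; ... }

   the type of "a" is variably modified, since its size refers to the
   PARM_DECL n, so duplicating f's body must remap that array type
   along with n. Types with constant sizes are shared, not copied.  */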
475 /* Return previously remapped type of TYPE in ID. Return NULL if TYPE
476 is NULL or TYPE has not been remapped before. */
478 static tree
479 remapped_type (tree type, copy_body_data *id)
481 tree *node;
483 if (type == NULL)
484 return type;
486 /* See if we have remapped this type. */
487 node = (tree *) pointer_map_contains (id->decl_map, type);
488 if (node)
489 return *node;
490 else
491 return NULL;
 495 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
497 static bool
498 can_be_nonlocal (tree decl, copy_body_data *id)
500 /* We can not duplicate function decls. */
501 if (TREE_CODE (decl) == FUNCTION_DECL)
502 return true;
504 /* Local static vars must be non-local or we get multiple declaration
505 problems. */
506 if (TREE_CODE (decl) == VAR_DECL
507 && !auto_var_in_fn_p (decl, id->src_fn))
508 return true;
510 /* At the moment dwarf2out can handle only these types of nodes. We
511 can support more later. */
512 if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
513 return false;
 515 /* We must use the global type. We call remapped_type instead of
516 remap_type since we don't want to remap this type here if it
517 hasn't been remapped before. */
518 if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
519 return false;
 521 /* Without SSA we can't tell if a variable is used. */
522 if (!gimple_in_ssa_p (cfun))
523 return false;
525 /* Live variables must be copied so we can attach DECL_RTL. */
526 if (var_ann (decl))
527 return false;
529 return true;
532 static tree
533 remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
535 tree old_var;
536 tree new_decls = NULL_TREE;
538 /* Remap its variables. */
539 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
541 tree new_var;
542 tree origin_var = DECL_ORIGIN (old_var);
544 if (can_be_nonlocal (old_var, id))
546 if (TREE_CODE (old_var) == VAR_DECL
547 && ! DECL_EXTERNAL (old_var)
548 && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
549 cfun->local_decls = tree_cons (NULL_TREE, old_var,
550 cfun->local_decls);
551 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
552 && !DECL_IGNORED_P (old_var)
553 && nonlocalized_list)
554 VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
555 continue;
558 /* Remap the variable. */
559 new_var = remap_decl (old_var, id);
561 /* If we didn't remap this variable, we can't mess with its
562 TREE_CHAIN. If we remapped this variable to the return slot, it's
563 already declared somewhere else, so don't declare it here. */
565 if (new_var == id->retvar)
567 else if (!new_var)
569 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
570 && !DECL_IGNORED_P (old_var)
571 && nonlocalized_list)
572 VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
574 else
576 gcc_assert (DECL_P (new_var));
577 TREE_CHAIN (new_var) = new_decls;
578 new_decls = new_var;
582 return nreverse (new_decls);
585 /* Copy the BLOCK to contain remapped versions of the variables
586 therein. And hook the new block into the block-tree. */
588 static void
589 remap_block (tree *block, copy_body_data *id)
591 tree old_block;
592 tree new_block;
593 tree fn;
595 /* Make the new block. */
596 old_block = *block;
597 new_block = make_node (BLOCK);
598 TREE_USED (new_block) = TREE_USED (old_block);
599 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
600 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
601 BLOCK_NONLOCALIZED_VARS (new_block)
602 = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
603 *block = new_block;
605 /* Remap its variables. */
606 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
607 &BLOCK_NONLOCALIZED_VARS (new_block),
608 id);
610 fn = id->dst_fn;
612 if (id->transform_lang_insert_block)
613 id->transform_lang_insert_block (new_block);
615 /* Remember the remapped block. */
616 insert_decl_map (id, old_block, new_block);
619 /* Copy the whole block tree and root it in id->block. */
620 static tree
621 remap_blocks (tree block, copy_body_data *id)
623 tree t;
624 tree new_tree = block;
626 if (!block)
627 return NULL;
629 remap_block (&new_tree, id);
630 gcc_assert (new_tree != block);
631 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
632 prepend_lexical_block (new_tree, remap_blocks (t, id));
633 /* Blocks are in arbitrary order, but make things slightly prettier and do
634 not swap order when producing a copy. */
635 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
636 return new_tree;
639 static void
640 copy_statement_list (tree *tp)
642 tree_stmt_iterator oi, ni;
643 tree new_tree;
645 new_tree = alloc_stmt_list ();
646 ni = tsi_start (new_tree);
647 oi = tsi_start (*tp);
648 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
649 *tp = new_tree;
651 for (; !tsi_end_p (oi); tsi_next (&oi))
653 tree stmt = tsi_stmt (oi);
654 if (TREE_CODE (stmt) == STATEMENT_LIST)
655 copy_statement_list (&stmt);
656 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
660 static void
661 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
663 tree block = BIND_EXPR_BLOCK (*tp);
664 /* Copy (and replace) the statement. */
665 copy_tree_r (tp, walk_subtrees, NULL);
666 if (block)
668 remap_block (&block, id);
669 BIND_EXPR_BLOCK (*tp) = block;
672 if (BIND_EXPR_VARS (*tp))
673 /* This will remap a lot of the same decls again, but this should be
674 harmless. */
675 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
679 /* Create a new gimple_seq by remapping all the statements in BODY
680 using the inlining information in ID. */
682 gimple_seq
683 remap_gimple_seq (gimple_seq body, copy_body_data *id)
685 gimple_stmt_iterator si;
686 gimple_seq new_body = NULL;
688 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
690 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
691 gimple_seq_add_stmt (&new_body, new_stmt);
694 return new_body;
698 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
699 block using the mapping information in ID. */
701 static gimple
702 copy_gimple_bind (gimple stmt, copy_body_data *id)
704 gimple new_bind;
705 tree new_block, new_vars;
706 gimple_seq body, new_body;
708 /* Copy the statement. Note that we purposely don't use copy_stmt
709 here because we need to remap statements as we copy. */
710 body = gimple_bind_body (stmt);
711 new_body = remap_gimple_seq (body, id);
713 new_block = gimple_bind_block (stmt);
714 if (new_block)
715 remap_block (&new_block, id);
717 /* This will remap a lot of the same decls again, but this should be
718 harmless. */
719 new_vars = gimple_bind_vars (stmt);
720 if (new_vars)
721 new_vars = remap_decls (new_vars, NULL, id);
723 new_bind = gimple_build_bind (new_vars, new_body, new_block);
725 return new_bind;
729 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
730 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
 731 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
 732 recursing into the child nodes of *TP. */
734 static tree
735 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
737 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
738 copy_body_data *id = (copy_body_data *) wi_p->info;
739 tree fn = id->src_fn;
741 if (TREE_CODE (*tp) == SSA_NAME)
743 *tp = remap_ssa_name (*tp, id);
744 *walk_subtrees = 0;
745 return NULL;
747 else if (auto_var_in_fn_p (*tp, fn))
749 /* Local variables and labels need to be replaced by equivalent
750 variables. We don't want to copy static variables; there's
751 only one of those, no matter how many times we inline the
752 containing function. Similarly for globals from an outer
753 function. */
754 tree new_decl;
756 /* Remap the declaration. */
757 new_decl = remap_decl (*tp, id);
758 gcc_assert (new_decl);
759 /* Replace this variable with the copy. */
760 STRIP_TYPE_NOPS (new_decl);
761 /* ??? The C++ frontend uses void * pointer zero to initialize
762 any other type. This confuses the middle-end type verification.
763 As cloned bodies do not go through gimplification again the fixup
764 there doesn't trigger. */
765 if (TREE_CODE (new_decl) == INTEGER_CST
766 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
767 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
768 *tp = new_decl;
769 *walk_subtrees = 0;
771 else if (TREE_CODE (*tp) == STATEMENT_LIST)
772 gcc_unreachable ();
773 else if (TREE_CODE (*tp) == SAVE_EXPR)
774 gcc_unreachable ();
775 else if (TREE_CODE (*tp) == LABEL_DECL
776 && (!DECL_CONTEXT (*tp)
777 || decl_function_context (*tp) == id->src_fn))
778 /* These may need to be remapped for EH handling. */
779 *tp = remap_decl (*tp, id);
780 else if (TYPE_P (*tp))
781 /* Types may need remapping as well. */
782 *tp = remap_type (*tp, id);
783 else if (CONSTANT_CLASS_P (*tp))
785 /* If this is a constant, we have to copy the node iff the type
786 will be remapped. copy_tree_r will not copy a constant. */
787 tree new_type = remap_type (TREE_TYPE (*tp), id);
789 if (new_type == TREE_TYPE (*tp))
790 *walk_subtrees = 0;
792 else if (TREE_CODE (*tp) == INTEGER_CST)
793 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
794 TREE_INT_CST_HIGH (*tp));
795 else
797 *tp = copy_node (*tp);
798 TREE_TYPE (*tp) = new_type;
801 else
803 /* Otherwise, just copy the node. Note that copy_tree_r already
804 knows not to copy VAR_DECLs, etc., so this is safe. */
805 if (TREE_CODE (*tp) == INDIRECT_REF)
807 /* Get rid of *& from inline substitutions that can happen when a
808 pointer argument is an ADDR_EXPR. */
809 tree decl = TREE_OPERAND (*tp, 0);
810 tree *n;
812 n = (tree *) pointer_map_contains (id->decl_map, decl);
813 if (n)
815 tree type, new_tree, old;
817 /* If we happen to get an ADDR_EXPR in n->value, strip
 818 it manually here as we'll eventually get ADDR_EXPRs
 819 which lie about the type they point to. In this case
820 build_fold_indirect_ref wouldn't strip the
821 INDIRECT_REF, but we absolutely rely on that. As
822 fold_indirect_ref does other useful transformations,
823 try that first, though. */
824 type = TREE_TYPE (TREE_TYPE (*n));
825 new_tree = unshare_expr (*n);
826 old = *tp;
827 *tp = gimple_fold_indirect_ref (new_tree);
828 if (!*tp)
830 if (TREE_CODE (new_tree) == ADDR_EXPR)
832 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
833 type, new_tree);
834 /* ??? We should either assert here or build
835 a VIEW_CONVERT_EXPR instead of blindly leaking
836 incompatible types to our IL. */
837 if (! *tp)
838 *tp = TREE_OPERAND (new_tree, 0);
840 else
842 *tp = build1 (INDIRECT_REF, type, new_tree);
843 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
844 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
847 *walk_subtrees = 0;
848 return NULL;
852 /* Here is the "usual case". Copy this tree node, and then
853 tweak some special cases. */
854 copy_tree_r (tp, walk_subtrees, NULL);
 856 /* Global variables we haven't seen yet need to go into referenced
 857 vars, unless they are referenced only from types. */
858 if (gimple_in_ssa_p (cfun)
859 && TREE_CODE (*tp) == VAR_DECL
860 && id->remapping_type_depth == 0
861 && !processing_debug_stmt)
862 add_referenced_var (*tp);
864 /* We should never have TREE_BLOCK set on non-statements. */
865 if (EXPR_P (*tp))
866 gcc_assert (!TREE_BLOCK (*tp));
868 if (TREE_CODE (*tp) != OMP_CLAUSE)
869 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
871 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
873 /* The copied TARGET_EXPR has never been expanded, even if the
874 original node was expanded already. */
875 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
876 TREE_OPERAND (*tp, 3) = NULL_TREE;
878 else if (TREE_CODE (*tp) == ADDR_EXPR)
880 /* Variable substitution need not be simple. In particular,
881 the INDIRECT_REF substitution above. Make sure that
882 TREE_CONSTANT and friends are up-to-date. But make sure
883 to not improperly set TREE_BLOCK on some sub-expressions. */
884 int invariant = is_gimple_min_invariant (*tp);
885 tree block = id->block;
886 id->block = NULL_TREE;
887 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
888 id->block = block;
890 /* Handle the case where we substituted an INDIRECT_REF
891 into the operand of the ADDR_EXPR. */
892 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
893 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
894 else
895 recompute_tree_invariant_for_addr_expr (*tp);
897 /* If this used to be invariant, but is not any longer,
898 then regimplification is probably needed. */
899 if (invariant && !is_gimple_min_invariant (*tp))
900 id->regimplify = true;
902 *walk_subtrees = 0;
906 /* Keep iterating. */
907 return NULL_TREE;
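/* To illustrate the INDIRECT_REF case above (a sketch): when

     int g (int *p) { return *p; }

   is inlined at a call "g (&x)", p is mapped to "&x" and "*p" would
   become "*&x"; the gimple_fold_indirect_ref path above folds that
   back to plain "x".  */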
911 /* Called from copy_body_id via walk_tree. DATA is really a
912 `copy_body_data *'. */
914 tree
915 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
917 copy_body_data *id = (copy_body_data *) data;
918 tree fn = id->src_fn;
919 tree new_block;
921 /* Begin by recognizing trees that we'll completely rewrite for the
922 inlining context. Our output for these trees is completely
 923 different from our input (e.g. RETURN_EXPR is deleted, and morphs
924 into an edge). Further down, we'll handle trees that get
925 duplicated and/or tweaked. */
927 /* When requested, RETURN_EXPRs should be transformed to just the
928 contained MODIFY_EXPR. The branch semantics of the return will
929 be handled elsewhere by manipulating the CFG rather than a statement. */
930 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
932 tree assignment = TREE_OPERAND (*tp, 0);
934 /* If we're returning something, just turn that into an
935 assignment into the equivalent of the original RESULT_DECL.
936 If the "assignment" is just the result decl, the result
937 decl has already been set (e.g. a recent "foo (&result_decl,
938 ...)"); just toss the entire RETURN_EXPR. */
939 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
941 /* Replace the RETURN_EXPR with (a copy of) the
942 MODIFY_EXPR hanging underneath. */
943 *tp = copy_node (assignment);
945 else /* Else the RETURN_EXPR returns no value. */
947 *tp = NULL;
948 return (tree) (void *)1;
951 else if (TREE_CODE (*tp) == SSA_NAME)
953 *tp = remap_ssa_name (*tp, id);
954 *walk_subtrees = 0;
955 return NULL;
958 /* Local variables and labels need to be replaced by equivalent
959 variables. We don't want to copy static variables; there's only
960 one of those, no matter how many times we inline the containing
961 function. Similarly for globals from an outer function. */
962 else if (auto_var_in_fn_p (*tp, fn))
964 tree new_decl;
966 /* Remap the declaration. */
967 new_decl = remap_decl (*tp, id);
968 gcc_assert (new_decl);
969 /* Replace this variable with the copy. */
970 STRIP_TYPE_NOPS (new_decl);
971 *tp = new_decl;
972 *walk_subtrees = 0;
974 else if (TREE_CODE (*tp) == STATEMENT_LIST)
975 copy_statement_list (tp);
976 else if (TREE_CODE (*tp) == SAVE_EXPR
977 || TREE_CODE (*tp) == TARGET_EXPR)
978 remap_save_expr (tp, id->decl_map, walk_subtrees);
979 else if (TREE_CODE (*tp) == LABEL_DECL
980 && (! DECL_CONTEXT (*tp)
981 || decl_function_context (*tp) == id->src_fn))
982 /* These may need to be remapped for EH handling. */
983 *tp = remap_decl (*tp, id);
984 else if (TREE_CODE (*tp) == BIND_EXPR)
985 copy_bind_expr (tp, walk_subtrees, id);
986 /* Types may need remapping as well. */
987 else if (TYPE_P (*tp))
988 *tp = remap_type (*tp, id);
990 /* If this is a constant, we have to copy the node iff the type will be
991 remapped. copy_tree_r will not copy a constant. */
992 else if (CONSTANT_CLASS_P (*tp))
994 tree new_type = remap_type (TREE_TYPE (*tp), id);
996 if (new_type == TREE_TYPE (*tp))
997 *walk_subtrees = 0;
999 else if (TREE_CODE (*tp) == INTEGER_CST)
1000 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
1001 TREE_INT_CST_HIGH (*tp));
1002 else
1004 *tp = copy_node (*tp);
1005 TREE_TYPE (*tp) = new_type;
1009 /* Otherwise, just copy the node. Note that copy_tree_r already
1010 knows not to copy VAR_DECLs, etc., so this is safe. */
1011 else
1013 /* Here we handle trees that are not completely rewritten.
1014 First we detect some inlining-induced bogosities for
1015 discarding. */
1016 if (TREE_CODE (*tp) == MODIFY_EXPR
1017 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1018 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1020 /* Some assignments VAR = VAR; don't generate any rtl code
1021 and thus don't count as variable modification. Avoid
1022 keeping bogosities like 0 = 0. */
1023 tree decl = TREE_OPERAND (*tp, 0), value;
1024 tree *n;
1026 n = (tree *) pointer_map_contains (id->decl_map, decl);
1027 if (n)
1029 value = *n;
1030 STRIP_TYPE_NOPS (value);
1031 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1033 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1034 return copy_tree_body_r (tp, walk_subtrees, data);
1038 else if (TREE_CODE (*tp) == INDIRECT_REF)
1040 /* Get rid of *& from inline substitutions that can happen when a
1041 pointer argument is an ADDR_EXPR. */
1042 tree decl = TREE_OPERAND (*tp, 0);
1043 tree *n;
1045 n = (tree *) pointer_map_contains (id->decl_map, decl);
1046 if (n)
1048 tree new_tree;
1049 tree old;
1050 /* If we happen to get an ADDR_EXPR in n->value, strip
 1051 it manually here as we'll eventually get ADDR_EXPRs
 1052 which lie about the type they point to. In this case
1053 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1054 but we absolutely rely on that. As fold_indirect_ref
1055 does other useful transformations, try that first, though. */
1056 tree type = TREE_TYPE (TREE_TYPE (*n));
1057 if (id->do_not_unshare)
1058 new_tree = *n;
1059 else
1060 new_tree = unshare_expr (*n);
1061 old = *tp;
1062 *tp = gimple_fold_indirect_ref (new_tree);
1063 if (! *tp)
1065 if (TREE_CODE (new_tree) == ADDR_EXPR)
1067 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
1068 type, new_tree);
1069 /* ??? We should either assert here or build
1070 a VIEW_CONVERT_EXPR instead of blindly leaking
1071 incompatible types to our IL. */
1072 if (! *tp)
1073 *tp = TREE_OPERAND (new_tree, 0);
1075 else
1077 *tp = build1 (INDIRECT_REF, type, new_tree);
1078 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1079 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1082 *walk_subtrees = 0;
1083 return NULL;
1087 /* Here is the "usual case". Copy this tree node, and then
1088 tweak some special cases. */
1089 copy_tree_r (tp, walk_subtrees, NULL);
 1091 /* Global variables we haven't seen yet need to go into referenced
 1092 vars, unless they are referenced only from types or debug stmts. */
1093 if (gimple_in_ssa_p (cfun)
1094 && TREE_CODE (*tp) == VAR_DECL
1095 && id->remapping_type_depth == 0
1096 && !processing_debug_stmt)
1097 add_referenced_var (*tp);
 1099 /* If EXPR has a block defined, map it to the newly constructed block.
 1100 When inlining we want EXPRs without a block to appear in the block
 1101 of the function call. */
1102 if (EXPR_P (*tp))
1104 new_block = id->block;
1105 if (TREE_BLOCK (*tp))
1107 tree *n;
1108 n = (tree *) pointer_map_contains (id->decl_map,
1109 TREE_BLOCK (*tp));
1110 gcc_assert (n);
1111 new_block = *n;
1113 TREE_BLOCK (*tp) = new_block;
1116 if (TREE_CODE (*tp) != OMP_CLAUSE)
1117 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1119 /* The copied TARGET_EXPR has never been expanded, even if the
1120 original node was expanded already. */
1121 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1123 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1124 TREE_OPERAND (*tp, 3) = NULL_TREE;
1127 /* Variable substitution need not be simple. In particular, the
1128 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1129 and friends are up-to-date. */
1130 else if (TREE_CODE (*tp) == ADDR_EXPR)
1132 int invariant = is_gimple_min_invariant (*tp);
1133 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1135 /* Handle the case where we substituted an INDIRECT_REF
1136 into the operand of the ADDR_EXPR. */
1137 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1138 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1139 else
1140 recompute_tree_invariant_for_addr_expr (*tp);
1142 /* If this used to be invariant, but is not any longer,
1143 then regimplification is probably needed. */
1144 if (invariant && !is_gimple_min_invariant (*tp))
1145 id->regimplify = true;
1147 *walk_subtrees = 0;
1151 /* Keep iterating. */
1152 return NULL_TREE;
1155 /* Helper for remap_gimple_stmt. Given an EH region number for the
1156 source function, map that to the duplicate EH region number in
1157 the destination function. */
1159 static int
1160 remap_eh_region_nr (int old_nr, copy_body_data *id)
1162 eh_region old_r, new_r;
1163 void **slot;
1165 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1166 slot = pointer_map_contains (id->eh_map, old_r);
1167 new_r = (eh_region) *slot;
1169 return new_r->index;
1172 /* Similar, but operate on INTEGER_CSTs. */
1174 static tree
1175 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1177 int old_nr, new_nr;
1179 old_nr = tree_low_cst (old_t_nr, 0);
1180 new_nr = remap_eh_region_nr (old_nr, id);
1182 return build_int_cst (NULL, new_nr);
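/* For example (a sketch): if EH region 2 of the source function was
   duplicated as region 5 of the destination, a copied call

     __builtin_eh_pointer (2)

   is rewritten to "__builtin_eh_pointer (5)"; see the GIMPLE_CALL
   case in remap_gimple_stmt below.  */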
1185 /* Helper for copy_bb. Remap statement STMT using the inlining
1186 information in ID. Return the new statement copy. */
1188 static gimple
1189 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1191 gimple copy = NULL;
1192 struct walk_stmt_info wi;
1193 tree new_block;
1194 bool skip_first = false;
1196 /* Begin by recognizing trees that we'll completely rewrite for the
1197 inlining context. Our output for these trees is completely
 1198 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1199 into an edge). Further down, we'll handle trees that get
1200 duplicated and/or tweaked. */
1202 /* When requested, GIMPLE_RETURNs should be transformed to just the
1203 contained GIMPLE_ASSIGN. The branch semantics of the return will
1204 be handled elsewhere by manipulating the CFG rather than the
1205 statement. */
1206 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1208 tree retval = gimple_return_retval (stmt);
1210 /* If we're returning something, just turn that into an
1211 assignment into the equivalent of the original RESULT_DECL.
1212 If RETVAL is just the result decl, the result decl has
1213 already been set (e.g. a recent "foo (&result_decl, ...)");
1214 just toss the entire GIMPLE_RETURN. */
1215 if (retval && TREE_CODE (retval) != RESULT_DECL)
1217 copy = gimple_build_assign (id->retvar, retval);
1218 /* id->retvar is already substituted. Skip it on later remapping. */
1219 skip_first = true;
1221 else
1222 return gimple_build_nop ();
1224 else if (gimple_has_substatements (stmt))
1226 gimple_seq s1, s2;
1228 /* When cloning bodies from the C++ front end, we will be handed bodies
1229 in High GIMPLE form. Handle here all the High GIMPLE statements that
1230 have embedded statements. */
1231 switch (gimple_code (stmt))
1233 case GIMPLE_BIND:
1234 copy = copy_gimple_bind (stmt, id);
1235 break;
1237 case GIMPLE_CATCH:
1238 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1239 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1240 break;
1242 case GIMPLE_EH_FILTER:
1243 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1244 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1245 break;
1247 case GIMPLE_TRY:
1248 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1249 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1250 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1251 break;
1253 case GIMPLE_WITH_CLEANUP_EXPR:
1254 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1255 copy = gimple_build_wce (s1);
1256 break;
1258 case GIMPLE_OMP_PARALLEL:
1259 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1260 copy = gimple_build_omp_parallel
1261 (s1,
1262 gimple_omp_parallel_clauses (stmt),
1263 gimple_omp_parallel_child_fn (stmt),
1264 gimple_omp_parallel_data_arg (stmt));
1265 break;
1267 case GIMPLE_OMP_TASK:
1268 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1269 copy = gimple_build_omp_task
1270 (s1,
1271 gimple_omp_task_clauses (stmt),
1272 gimple_omp_task_child_fn (stmt),
1273 gimple_omp_task_data_arg (stmt),
1274 gimple_omp_task_copy_fn (stmt),
1275 gimple_omp_task_arg_size (stmt),
1276 gimple_omp_task_arg_align (stmt));
1277 break;
1279 case GIMPLE_OMP_FOR:
1280 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1281 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1282 copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
1283 gimple_omp_for_collapse (stmt), s2);
1285 size_t i;
1286 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1288 gimple_omp_for_set_index (copy, i,
1289 gimple_omp_for_index (stmt, i));
1290 gimple_omp_for_set_initial (copy, i,
1291 gimple_omp_for_initial (stmt, i));
1292 gimple_omp_for_set_final (copy, i,
1293 gimple_omp_for_final (stmt, i));
1294 gimple_omp_for_set_incr (copy, i,
1295 gimple_omp_for_incr (stmt, i));
1296 gimple_omp_for_set_cond (copy, i,
1297 gimple_omp_for_cond (stmt, i));
1300 break;
1302 case GIMPLE_OMP_MASTER:
1303 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1304 copy = gimple_build_omp_master (s1);
1305 break;
1307 case GIMPLE_OMP_ORDERED:
1308 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1309 copy = gimple_build_omp_ordered (s1);
1310 break;
1312 case GIMPLE_OMP_SECTION:
1313 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1314 copy = gimple_build_omp_section (s1);
1315 break;
1317 case GIMPLE_OMP_SECTIONS:
1318 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1319 copy = gimple_build_omp_sections
1320 (s1, gimple_omp_sections_clauses (stmt));
1321 break;
1323 case GIMPLE_OMP_SINGLE:
1324 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1325 copy = gimple_build_omp_single
1326 (s1, gimple_omp_single_clauses (stmt));
1327 break;
1329 case GIMPLE_OMP_CRITICAL:
1330 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1331 copy
1332 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1333 break;
1335 default:
1336 gcc_unreachable ();
1339 else
1341 if (gimple_assign_copy_p (stmt)
1342 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1343 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1345 /* Here we handle statements that are not completely rewritten.
1346 First we detect some inlining-induced bogosities for
1347 discarding. */
1349 /* Some assignments VAR = VAR; don't generate any rtl code
1350 and thus don't count as variable modification. Avoid
1351 keeping bogosities like 0 = 0. */
1352 tree decl = gimple_assign_lhs (stmt), value;
1353 tree *n;
1355 n = (tree *) pointer_map_contains (id->decl_map, decl);
1356 if (n)
1358 value = *n;
1359 STRIP_TYPE_NOPS (value);
1360 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1361 return gimple_build_nop ();
1365 if (gimple_debug_bind_p (stmt))
1367 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1368 gimple_debug_bind_get_value (stmt),
1369 stmt);
1370 VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1371 return copy;
1374 /* Create a new deep copy of the statement. */
1375 copy = gimple_copy (stmt);
1377 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1378 RESX and EH_DISPATCH. */
1379 if (id->eh_map)
1380 switch (gimple_code (copy))
1382 case GIMPLE_CALL:
1384 tree r, fndecl = gimple_call_fndecl (copy);
1385 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1386 switch (DECL_FUNCTION_CODE (fndecl))
1388 case BUILT_IN_EH_COPY_VALUES:
1389 r = gimple_call_arg (copy, 1);
1390 r = remap_eh_region_tree_nr (r, id);
1391 gimple_call_set_arg (copy, 1, r);
1392 /* FALLTHRU */
1394 case BUILT_IN_EH_POINTER:
1395 case BUILT_IN_EH_FILTER:
1396 r = gimple_call_arg (copy, 0);
1397 r = remap_eh_region_tree_nr (r, id);
1398 gimple_call_set_arg (copy, 0, r);
1399 break;
1401 default:
1402 break;
1405 break;
1407 case GIMPLE_RESX:
1409 int r = gimple_resx_region (copy);
1410 r = remap_eh_region_nr (r, id);
1411 gimple_resx_set_region (copy, r);
1413 break;
1415 case GIMPLE_EH_DISPATCH:
1417 int r = gimple_eh_dispatch_region (copy);
1418 r = remap_eh_region_nr (r, id);
1419 gimple_eh_dispatch_set_region (copy, r);
1421 break;
1423 default:
1424 break;
1428 /* If STMT has a block defined, map it to the newly constructed
1429 block. When inlining we want statements without a block to
1430 appear in the block of the function call. */
1431 new_block = id->block;
1432 if (gimple_block (copy))
1434 tree *n;
1435 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1436 gcc_assert (n);
1437 new_block = *n;
1440 gimple_set_block (copy, new_block);
1442 if (gimple_debug_bind_p (copy))
1443 return copy;
1445 /* Remap all the operands in COPY. */
1446 memset (&wi, 0, sizeof (wi));
1447 wi.info = id;
1448 if (skip_first)
1449 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1450 else
1451 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1453 /* Clear the copied virtual operands. We are not remapping them here
1454 but are going to recreate them from scratch. */
1455 if (gimple_has_mem_ops (copy))
1457 gimple_set_vdef (copy, NULL_TREE);
1458 gimple_set_vuse (copy, NULL_TREE);
1461 return copy;
 1465 /* Copy basic block, scale profile accordingly. Edges will be taken care of
 1466 later. */
1468 static basic_block
1469 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1470 gcov_type count_scale)
1472 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1473 basic_block copy_basic_block;
1474 tree decl;
1476 /* create_basic_block() will append every new block to
1477 basic_block_info automatically. */
1478 copy_basic_block = create_basic_block (NULL, (void *) 0,
1479 (basic_block) bb->prev_bb->aux);
1480 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
 1482 /* We are going to rebuild frequencies from scratch. These values
 1483 are of only minor importance for driving canonicalize_loop_headers. */
1484 copy_basic_block->frequency = ((gcov_type)bb->frequency
1485 * frequency_scale / REG_BR_PROB_BASE);
1487 if (copy_basic_block->frequency > BB_FREQ_MAX)
1488 copy_basic_block->frequency = BB_FREQ_MAX;
1490 copy_gsi = gsi_start_bb (copy_basic_block);
1492 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1494 gimple stmt = gsi_stmt (gsi);
1495 gimple orig_stmt = stmt;
1497 id->regimplify = false;
1498 stmt = remap_gimple_stmt (stmt, id);
1499 if (gimple_nop_p (stmt))
1500 continue;
1502 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1503 seq_gsi = copy_gsi;
1505 /* With return slot optimization we can end up with
1506 non-gimple (foo *)&this->m, fix that here. */
1507 if (is_gimple_assign (stmt)
1508 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1509 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1511 tree new_rhs;
1512 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1513 gimple_assign_rhs1 (stmt),
1514 true, NULL, false, GSI_NEW_STMT);
1515 gimple_assign_set_rhs1 (stmt, new_rhs);
1516 id->regimplify = false;
1519 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1521 if (id->regimplify)
1522 gimple_regimplify_operands (stmt, &seq_gsi);
1524 /* If copy_basic_block has been empty at the start of this iteration,
1525 call gsi_start_bb again to get at the newly added statements. */
1526 if (gsi_end_p (copy_gsi))
1527 copy_gsi = gsi_start_bb (copy_basic_block);
1528 else
1529 gsi_next (&copy_gsi);
 1531 /* Process the new statement. The call to gimple_regimplify_operands
 1532 possibly turned the statement into multiple statements; we
 1533 need to process all of them. */
1536 tree fn;
1538 stmt = gsi_stmt (copy_gsi);
1539 if (is_gimple_call (stmt)
1540 && gimple_call_va_arg_pack_p (stmt)
1541 && id->gimple_call)
1543 /* __builtin_va_arg_pack () should be replaced by
1544 all arguments corresponding to ... in the caller. */
1545 tree p;
1546 gimple new_call;
1547 VEC(tree, heap) *argarray;
1548 size_t nargs = gimple_call_num_args (id->gimple_call);
1549 size_t n;
1551 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1552 nargs--;
1554 /* Create the new array of arguments. */
1555 n = nargs + gimple_call_num_args (stmt);
1556 argarray = VEC_alloc (tree, heap, n);
1557 VEC_safe_grow (tree, heap, argarray, n);
1559 /* Copy all the arguments before '...' */
1560 memcpy (VEC_address (tree, argarray),
1561 gimple_call_arg_ptr (stmt, 0),
1562 gimple_call_num_args (stmt) * sizeof (tree));
1564 /* Append the arguments passed in '...' */
1565 memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1566 gimple_call_arg_ptr (id->gimple_call, 0)
1567 + (gimple_call_num_args (id->gimple_call) - nargs),
1568 nargs * sizeof (tree));
1570 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1571 argarray);
1573 VEC_free (tree, heap, argarray);
1575 /* Copy all GIMPLE_CALL flags, location and block, except
1576 GF_CALL_VA_ARG_PACK. */
1577 gimple_call_copy_flags (new_call, stmt);
1578 gimple_call_set_va_arg_pack (new_call, false);
1579 gimple_set_location (new_call, gimple_location (stmt));
1580 gimple_set_block (new_call, gimple_block (stmt));
1581 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1583 gsi_replace (&copy_gsi, new_call, false);
1584 gimple_set_bb (stmt, NULL);
1585 stmt = new_call;
1587 else if (is_gimple_call (stmt)
1588 && id->gimple_call
1589 && (decl = gimple_call_fndecl (stmt))
1590 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1591 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1593 /* __builtin_va_arg_pack_len () should be replaced by
1594 the number of anonymous arguments. */
1595 size_t nargs = gimple_call_num_args (id->gimple_call);
1596 tree count, p;
1597 gimple new_stmt;
1599 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1600 nargs--;
1602 count = build_int_cst (integer_type_node, nargs);
1603 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1604 gsi_replace (&copy_gsi, new_stmt, false);
1605 stmt = new_stmt;
 1608 /* Statements produced by inlining can be unfolded, especially
 1609 when we constant propagated some operands. We can't fold
 1610 them right now for two reasons:
 1611 1) folding requires SSA_NAME_DEF_STMTs to be correct
 1612 2) we can't change function calls to builtins.
 1613 So we just mark the statement for later folding. We mark
 1614 all new statements, instead of just statements that have changed
 1615 by some nontrivial substitution, so even statements made
 1616 foldable indirectly are updated. If this turns out to be
 1617 expensive, copy_body can be told to watch for nontrivial
 1618 changes. */
1619 if (id->statements_to_fold)
1620 pointer_set_insert (id->statements_to_fold, stmt);
1622 /* We're duplicating a CALL_EXPR. Find any corresponding
1623 callgraph edges and update or duplicate them. */
1624 if (is_gimple_call (stmt))
1626 struct cgraph_edge *edge;
1627 int flags;
1629 switch (id->transform_call_graph_edges)
1631 case CB_CGE_DUPLICATE:
1632 edge = cgraph_edge (id->src_node, orig_stmt);
1633 if (edge)
1634 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1635 gimple_uid (stmt),
1636 REG_BR_PROB_BASE, 1,
1637 edge->frequency, true);
1638 break;
1640 case CB_CGE_MOVE_CLONES:
1641 cgraph_set_call_stmt_including_clones (id->dst_node,
1642 orig_stmt, stmt);
1643 edge = cgraph_edge (id->dst_node, stmt);
1644 break;
1646 case CB_CGE_MOVE:
1647 edge = cgraph_edge (id->dst_node, orig_stmt);
1648 if (edge)
1649 cgraph_set_call_stmt (edge, stmt);
1650 break;
1652 default:
1653 gcc_unreachable ();
 1656 /* Constant propagation on arguments done during inlining
 1657 may create a new direct call. Produce an edge for it. */
1658 if ((!edge
1659 || (edge->indirect_call
1660 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1661 && is_gimple_call (stmt)
1662 && (fn = gimple_call_fndecl (stmt)) != NULL)
1664 struct cgraph_node *dest = cgraph_node (fn);
 1666 /* We have a missing edge in the callgraph. This can happen
1667 when previous inlining turned an indirect call into a
1668 direct call by constant propagating arguments. In all
1669 other cases we hit a bug (incorrect node sharing is the
1670 most common reason for missing edges). */
1671 gcc_assert (dest->needed || !dest->analyzed);
1672 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1673 cgraph_create_edge_including_clones
1674 (id->dst_node, dest, stmt, bb->count,
1675 compute_call_stmt_bb_frequency (id->dst_node->decl, bb),
1676 bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
1677 else
1678 cgraph_create_edge (id->dst_node, dest, stmt,
1679 bb->count, CGRAPH_FREQ_BASE,
1680 bb->loop_depth)->inline_failed
1681 = CIF_ORIGINALLY_INDIRECT_CALL;
1682 if (dump_file)
1684 fprintf (dump_file, "Created new direct edge to %s",
1685 cgraph_node_name (dest));
1689 flags = gimple_call_flags (stmt);
1690 if (flags & ECF_MAY_BE_ALLOCA)
1691 cfun->calls_alloca = true;
1692 if (flags & ECF_RETURNS_TWICE)
1693 cfun->calls_setjmp = true;
1696 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1697 id->eh_map, id->eh_lp_nr);
1699 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1701 ssa_op_iter i;
1702 tree def;
1704 find_new_referenced_vars (gsi_stmt (copy_gsi));
1705 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1706 if (TREE_CODE (def) == SSA_NAME)
1707 SSA_NAME_DEF_STMT (def) = stmt;
1710 gsi_next (&copy_gsi);
1712 while (!gsi_end_p (copy_gsi));
1714 copy_gsi = gsi_last_bb (copy_basic_block);
1717 return copy_basic_block;
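/* To illustrate the __builtin_va_arg_pack handling in copy_bb above
   (a sketch following the documented semantics of the builtin; the
   myprintf function is hypothetical):

     static inline int
     myprintf (FILE *f, const char *fmt, ...)
     {
       return fprintf (f, fmt, __builtin_va_arg_pack ());
     }

   Inlining "myprintf (stderr, "%d %s", i, s)" rewrites the fprintf
   call to "fprintf (stderr, "%d %s", i, s)", and a use of
   __builtin_va_arg_pack_len () in the same body would be replaced by
   the constant 2.  */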
 1720 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
 1721 form is quite easy, since the dominator relationship for the old basic
 1722 blocks does not change.
 1724 There is however an exception: inlining might change the dominator relation
 1725 across EH edges from basic blocks within the inlined function to landing
 1726 pads in the function we inline into.
 1728 The function fills in PHI_RESULTs of such PHI nodes if they refer
 1729 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
 1730 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
 1731 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
 1732 set, and this means that there will be no overlapping live ranges
 1733 for the underlying symbol.
 1735 This might change in the future if we allow redirecting of EH edges and
 1736 we might then want to change the way we build the CFG pre-inlining to
 1737 include all the possible edges. */
1738 static void
1739 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1740 bool can_throw, bool nonlocal_goto)
1742 edge e;
1743 edge_iterator ei;
1745 FOR_EACH_EDGE (e, ei, bb->succs)
1746 if (!e->dest->aux
1747 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1749 gimple phi;
1750 gimple_stmt_iterator si;
1752 if (!nonlocal_goto)
1753 gcc_assert (e->flags & EDGE_EH);
1755 if (!can_throw)
1756 gcc_assert (!(e->flags & EDGE_EH));
1758 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1760 edge re;
1762 phi = gsi_stmt (si);
1764 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1765 gcc_assert (!e->dest->aux);
1767 gcc_assert ((e->flags & EDGE_EH)
1768 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1770 if (!is_gimple_reg (PHI_RESULT (phi)))
1772 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
1773 continue;
1776 re = find_edge (ret_bb, e->dest);
1777 gcc_assert (re);
1778 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1779 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1781 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1782 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1788 /* Copy edges from BB into its copy constructed earlier, scale profile
 1789 accordingly. Edges will be taken care of later. Assume the aux
 1790 pointers point to the copies of each BB. */
1792 static void
1793 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1795 basic_block new_bb = (basic_block) bb->aux;
1796 edge_iterator ei;
1797 edge old_edge;
1798 gimple_stmt_iterator si;
1799 int flags;
1801 /* Use the indices from the original blocks to create edges for the
1802 new ones. */
1803 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1804 if (!(old_edge->flags & EDGE_EH))
1806 edge new_edge;
1808 flags = old_edge->flags;
 1810 /* Return edges do get a FALLTHRU flag when they get inlined. */
1811 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1812 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1813 flags |= EDGE_FALLTHRU;
1814 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1815 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1816 new_edge->probability = old_edge->probability;
1819 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1820 return;
1822 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1824 gimple copy_stmt;
1825 bool can_throw, nonlocal_goto;
1827 copy_stmt = gsi_stmt (si);
1828 if (!is_gimple_debug (copy_stmt))
1830 update_stmt (copy_stmt);
1831 if (gimple_in_ssa_p (cfun))
1832 mark_symbols_for_renaming (copy_stmt);
1835 /* Do this before the possible split_block. */
1836 gsi_next (&si);
1838 /* If this tree could throw an exception, there are two
1839 cases where we need to add abnormal edge(s): the
1840 tree wasn't in a region and there is a "current
1841 region" in the caller; or the original tree had
1842 EH edges. In both cases split the block after the tree,
1843 and add abnormal edge(s) as needed; we need both
1844 those from the callee and the caller.
1845 We check whether the copy can throw, because the const
1846 propagation can change an INDIRECT_REF which throws
1847 into a COMPONENT_REF which doesn't. If the copy
1848 can throw, the original could also throw. */
1849 can_throw = stmt_can_throw_internal (copy_stmt);
1850 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1852 if (can_throw || nonlocal_goto)
1854 if (!gsi_end_p (si))
1855 /* Note that bb's predecessor edges aren't necessarily
1856 right at this point; split_block doesn't care. */
1858 edge e = split_block (new_bb, copy_stmt);
1860 new_bb = e->dest;
1861 new_bb->aux = e->src->aux;
1862 si = gsi_start_bb (new_bb);
1866 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1867 make_eh_dispatch_edges (copy_stmt);
1868 else if (can_throw)
1869 make_eh_edges (copy_stmt);
1871 if (nonlocal_goto)
1872 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1874 if ((can_throw || nonlocal_goto)
1875 && gimple_in_ssa_p (cfun))
1876 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1877 can_throw, nonlocal_goto);
1881 /* Copy the PHIs. All blocks and edges have been copied; some blocks
1882 may have been split and new outgoing EH edges inserted.
1883 BB points to the block of the original function and the AUX pointers
1884 link the original and newly copied blocks. */
1886 static void
1887 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1889 basic_block const new_bb = (basic_block) bb->aux;
1890 edge_iterator ei;
1891 gimple phi;
1892 gimple_stmt_iterator si;
1894 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1896 tree res, new_res;
1897 gimple new_phi;
1898 edge new_edge;
1900 phi = gsi_stmt (si);
1901 res = PHI_RESULT (phi);
1902 new_res = res;
1903 if (is_gimple_reg (res))
1905 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1906 SSA_NAME_DEF_STMT (new_res)
1907 = new_phi = create_phi_node (new_res, new_bb);
1908 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1910 edge const old_edge
1911 = find_edge ((basic_block) new_edge->src->aux, bb);
1912 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1913 tree new_arg = arg;
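/* Clear id->block while remapping the PHI argument so that the copied
   expression is not attributed to the inlined BLOCK; the argument
   belongs to the incoming edge rather than to any lexical block. */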
1914 tree block = id->block;
1915 id->block = NULL_TREE;
1916 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1917 id->block = block;
1918 gcc_assert (new_arg);
1919 /* With return slot optimization we can end up with
1920 non-gimple (foo *)&this->m, fix that here. */
1921 if (TREE_CODE (new_arg) != SSA_NAME
1922 && TREE_CODE (new_arg) != FUNCTION_DECL
1923 && !is_gimple_val (new_arg))
1925 gimple_seq stmts = NULL;
1926 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1927 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
1929 add_phi_arg (new_phi, new_arg, new_edge,
1930 gimple_phi_arg_location_from_edge (phi, old_edge));
1937 /* Wrapper for remap_decl so it can be used as a callback. */
1939 static tree
1940 remap_decl_1 (tree decl, void *data)
1942 return remap_decl (decl, (copy_body_data *) data);
1945 /* Build the struct function and associated data structures for the new
1946 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1948 static void
1949 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
1950 int frequency)
1952 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1953 gcov_type count_scale, frequency_scale;
1955 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1956 count_scale = (REG_BR_PROB_BASE * count
1957 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1958 else
1959 count_scale = 1;
1961 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1962 frequency_scale = (REG_BR_PROB_BASE * frequency
1963 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1965 else
1966 frequency_scale = count_scale;
1968 /* Register specific tree functions. */
1969 gimple_register_cfg_hooks ();
1971 /* Get clean struct function. */
1972 push_struct_function (new_fndecl);
1974 /* We will rebuild these, so just sanity check that they are empty. */
1975 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
1976 gcc_assert (cfun->local_decls == NULL);
1977 gcc_assert (cfun->cfg == NULL);
1978 gcc_assert (cfun->decl == new_fndecl);
1980 /* Copy items we preserve during cloning. */
1981 cfun->static_chain_decl = src_cfun->static_chain_decl;
1982 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
1983 cfun->function_end_locus = src_cfun->function_end_locus;
1984 cfun->curr_properties = src_cfun->curr_properties;
1985 cfun->last_verified = src_cfun->last_verified;
1986 if (src_cfun->ipa_transforms_to_apply)
1987 cfun->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
1988 src_cfun->ipa_transforms_to_apply);
1989 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
1990 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
1991 cfun->function_frequency = src_cfun->function_frequency;
1992 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
1993 cfun->stdarg = src_cfun->stdarg;
1994 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
1995 cfun->after_inlining = src_cfun->after_inlining;
1996 cfun->returns_struct = src_cfun->returns_struct;
1997 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
1998 cfun->after_tree_profile = src_cfun->after_tree_profile;
2000 init_empty_tree_cfg ();
2002 ENTRY_BLOCK_PTR->count =
2003 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2004 REG_BR_PROB_BASE);
2005 ENTRY_BLOCK_PTR->frequency =
2006 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
2007 frequency_scale / REG_BR_PROB_BASE);
2008 EXIT_BLOCK_PTR->count =
2009 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2010 REG_BR_PROB_BASE);
2011 EXIT_BLOCK_PTR->frequency =
2012 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
2013 frequency_scale / REG_BR_PROB_BASE);
2014 if (src_cfun->eh)
2015 init_eh_for_function ();
2017 if (src_cfun->gimple_df)
2019 init_tree_ssa (cfun);
2020 cfun->gimple_df->in_ssa_p = true;
2021 init_ssa_operands ();
2023 pop_cfun ();
2026 /* Make a copy of the body of FN so that it can be inserted inline in
2027 another function. Walks FN via CFG, returns new fndecl. */
2029 static tree
2030 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
2031 basic_block entry_block_map, basic_block exit_block_map)
2033 tree callee_fndecl = id->src_fn;
2034 /* Original cfun for the callee, doesn't change. */
2035 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2036 struct function *cfun_to_copy;
2037 basic_block bb;
2038 tree new_fndecl = NULL;
2039 gcov_type count_scale, frequency_scale;
2040 int last;
2042 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2043 count_scale = (REG_BR_PROB_BASE * count
2044 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2045 else
2046 count_scale = 1;
2048 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
2049 frequency_scale = (REG_BR_PROB_BASE * frequency
2050 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
2052 else
2053 frequency_scale = count_scale;
2055 /* Register specific tree functions. */
2056 gimple_register_cfg_hooks ();
2058 /* Must have a CFG at this point. */
2059 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2060 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2062 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2064 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2065 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2066 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2067 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2069 /* Duplicate any exception-handling regions. */
2070 if (cfun->eh)
2071 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2072 remap_decl_1, id);
2074 /* Use aux pointers to map the original blocks to their copies. */
2075 FOR_EACH_BB_FN (bb, cfun_to_copy)
2077 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2078 bb->aux = new_bb;
2079 new_bb->aux = bb;
2082 last = last_basic_block;
2084 /* Now that we've duplicated the blocks, duplicate their edges. */
2085 FOR_ALL_BB_FN (bb, cfun_to_copy)
2086 copy_edges_for_bb (bb, count_scale, exit_block_map);
2088 if (gimple_in_ssa_p (cfun))
2089 FOR_ALL_BB_FN (bb, cfun_to_copy)
2090 copy_phis_for_bb (bb, id);
2092 FOR_ALL_BB_FN (bb, cfun_to_copy)
2094 ((basic_block)bb->aux)->aux = NULL;
2095 bb->aux = NULL;
2098 /* Zero out AUX fields of blocks newly created during EH edge
2099 insertion. */
2100 for (; last < last_basic_block; last++)
2101 BASIC_BLOCK (last)->aux = NULL;
2102 entry_block_map->aux = NULL;
2103 exit_block_map->aux = NULL;
2105 if (id->eh_map)
2107 pointer_map_destroy (id->eh_map);
2108 id->eh_map = NULL;
2111 return new_fndecl;
2114 /* Copy the debug STMT using ID. We deal with these statements in a
2115 special way: if any variable in their VALUE expression wasn't
2116 remapped yet, we won't remap it, because that would get decl uids
2117 out of sync, causing codegen differences between -g and -g0. If
2118 this arises, we drop the VALUE expression altogether. */
2120 static void
2121 copy_debug_stmt (gimple stmt, copy_body_data *id)
2123 tree t, *n;
2124 struct walk_stmt_info wi;
2126 t = id->block;
2127 if (gimple_block (stmt))
2129 tree *n;
2130 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2131 if (n)
2132 t = *n;
2134 gimple_set_block (stmt, t);
2136 /* Remap all the operands in COPY. */
2137 memset (&wi, 0, sizeof (wi));
2138 wi.info = id;
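/* Signal the decl-remapping code that we are processing a debug bind;
   it flips this flag to -1 when it encounters a decl that has not been
   remapped yet (checked below). */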
2140 processing_debug_stmt = 1;
2142 t = gimple_debug_bind_get_var (stmt);
2144 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2145 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2147 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2148 t = *n;
2150 else
2151 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2153 gimple_debug_bind_set_var (stmt, t);
2155 if (gimple_debug_bind_has_value_p (stmt))
2156 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2157 remap_gimple_op_r, &wi, NULL);
2159 /* Punt if any decl couldn't be remapped. */
2160 if (processing_debug_stmt < 0)
2161 gimple_debug_bind_reset_value (stmt);
2163 processing_debug_stmt = 0;
2165 update_stmt (stmt);
2166 if (gimple_in_ssa_p (cfun))
2167 mark_symbols_for_renaming (stmt);
2170 /* Process deferred debug stmts. In order to give values better odds
2171 of being successfully remapped, we delay the processing of debug
2172 stmts until all other stmts that might require remapping are
2173 processed. */
2175 static void
2176 copy_debug_stmts (copy_body_data *id)
2178 size_t i;
2179 gimple stmt;
2181 if (!id->debug_stmts)
2182 return;
2184 for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
2185 copy_debug_stmt (stmt, id);
2187 VEC_free (gimple, heap, id->debug_stmts);
2190 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2191 another function. */
2193 static tree
2194 copy_tree_body (copy_body_data *id)
2196 tree fndecl = id->src_fn;
2197 tree body = DECL_SAVED_TREE (fndecl);
2199 walk_tree (&body, copy_tree_body_r, id, NULL);
2201 return body;
2204 /* Make a copy of the body of FN so that it can be inserted inline in
2205 another function. */
2207 static tree
2208 copy_body (copy_body_data *id, gcov_type count, int frequency,
2209 basic_block entry_block_map, basic_block exit_block_map)
2211 tree fndecl = id->src_fn;
2212 tree body;
2214 /* If this body has a CFG, walk CFG and copy. */
2215 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2216 body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);
2217 copy_debug_stmts (id);
2219 return body;
2222 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2223 defined in function FN, or of a data member thereof. */
2225 static bool
2226 self_inlining_addr_expr (tree value, tree fn)
2228 tree var;
2230 if (TREE_CODE (value) != ADDR_EXPR)
2231 return false;
2233 var = get_base_address (TREE_OPERAND (value, 0));
2235 return var && auto_var_in_fn_p (var, fn);
2238 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2239 lexical block and line number information from BASE_STMT, if given,
2240 or from the last stmt of the block otherwise. */
2242 static gimple
2243 insert_init_debug_bind (copy_body_data *id,
2244 basic_block bb, tree var, tree value,
2245 gimple base_stmt)
2247 gimple note;
2248 gimple_stmt_iterator gsi;
2249 tree tracked_var;
2251 if (!gimple_in_ssa_p (id->src_cfun))
2252 return NULL;
2254 if (!MAY_HAVE_DEBUG_STMTS)
2255 return NULL;
2257 tracked_var = target_for_debug_bind (var);
2258 if (!tracked_var)
2259 return NULL;
2261 if (bb)
2263 gsi = gsi_last_bb (bb);
2264 if (!base_stmt && !gsi_end_p (gsi))
2265 base_stmt = gsi_stmt (gsi);
2268 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2270 if (bb)
2272 if (!gsi_end_p (gsi))
2273 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2274 else
2275 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2278 return note;
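/* Insert INIT_STMT, the initialization of an inlined parameter, at the
   end of basic block BB, regimplifying it as needed and emitting a
   matching debug bind for its LHS. */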
2281 static void
2282 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2284 /* If VAR represents a zero-sized variable, the assignment
2285 statement may result in no gimple statements at all. */
2286 if (init_stmt)
2288 gimple_stmt_iterator si = gsi_last_bb (bb);
2290 /* We can end up with init statements that store to a non-register
2291 from a rhs with a conversion. Handle that here by forcing the
2292 rhs into a temporary. gimple_regimplify_operands is not
2293 prepared to do this for us. */
2294 if (!is_gimple_debug (init_stmt)
2295 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2296 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2297 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2299 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2300 gimple_expr_type (init_stmt),
2301 gimple_assign_rhs1 (init_stmt));
2302 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2303 GSI_NEW_STMT);
2304 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2305 gimple_assign_set_rhs1 (init_stmt, rhs);
2307 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2308 gimple_regimplify_operands (init_stmt, &si);
2309 mark_symbols_for_renaming (init_stmt);
2311 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2313 tree var, def = gimple_assign_lhs (init_stmt);
2315 if (TREE_CODE (def) == SSA_NAME)
2316 var = SSA_NAME_VAR (def);
2317 else
2318 var = def;
2320 insert_init_debug_bind (id, bb, var, def, init_stmt);
2325 /* Initialize parameter P with VALUE. If needed, produce the init
2326 statement at the end of BB. When BB is NULL, we return the init
2327 statement to be output later. */
2328 static gimple
2329 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2330 basic_block bb, tree *vars)
2332 gimple init_stmt = NULL;
2333 tree var;
2334 tree rhs = value;
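/* DEF is the parameter's default-definition SSA_NAME in the callee,
   if it has one. */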
2335 tree def = (gimple_in_ssa_p (cfun)
2336 ? gimple_default_def (id->src_cfun, p) : NULL);
2338 if (value
2339 && value != error_mark_node
2340 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2342 if (fold_convertible_p (TREE_TYPE (p), value))
2343 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2344 else
2345 /* ??? For valid (GIMPLE) programs we should not end up here.
2346 Still if something has gone wrong and we end up with truly
2347 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2348 to not leak invalid GIMPLE to the following passes. */
2349 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2352 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2353 here since the type of this decl must be visible to the calling
2354 function. */
2355 var = copy_decl_to_var (p, id);
2357 /* We're actually using the newly-created var. */
2358 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2360 get_var_ann (var);
2361 add_referenced_var (var);
2364 /* Declare this new variable. */
2365 TREE_CHAIN (var) = *vars;
2366 *vars = var;
2368 /* Make gimplifier happy about this variable. */
2369 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2371 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2372 we would not need to create a new variable here at all, if it
2373 weren't for debug info. Still, we can just use the argument
2374 value. */
2375 if (TREE_READONLY (p)
2376 && !TREE_ADDRESSABLE (p)
2377 && value && !TREE_SIDE_EFFECTS (value)
2378 && !def)
2380 /* We may produce non-gimple trees by adding NOPs or introduce
2381 invalid sharing when the operand is not really constant.
2382 It is no big deal to prohibit constant propagation here, as
2383 we will constant-propagate in the DOM1 pass anyway. */
2384 if (is_gimple_min_invariant (value)
2385 && useless_type_conversion_p (TREE_TYPE (p),
2386 TREE_TYPE (value))
2387 /* We have to be very careful about ADDR_EXPR. Make sure
2388 the base variable isn't a local variable of the inlined
2389 function, e.g., when doing recursive inlining, direct or
2390 mutually-recursive or whatever, which is why we don't
2391 just test whether fn == current_function_decl. */
2392 && ! self_inlining_addr_expr (value, fn))
2394 insert_decl_map (id, p, value);
2395 insert_debug_decl_map (id, p, var);
2396 return insert_init_debug_bind (id, bb, var, value, NULL);
2400 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2401 that way, when the PARM_DECL is encountered, it will be
2402 automatically replaced by the VAR_DECL. */
2403 insert_decl_map (id, p, var);
2405 /* Even if P was TREE_READONLY, the new VAR should not be.
2406 In the original code, we would have constructed a
2407 temporary, and then the function body would have never
2408 changed the value of P. However, now, we will be
2409 constructing VAR directly. The constructor body may
2410 change its value multiple times as it is being
2411 constructed. Therefore, it must not be TREE_READONLY;
2412 the back-end assumes that TREE_READONLY variable is
2413 assigned to only once. */
2414 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2415 TREE_READONLY (var) = 0;
2417 /* If there is no setup required and we are in SSA, take the easy route
2418 of replacing all SSA names representing the function parameter by the
2419 SSA name passed to the function.
2421 We need to construct a map for the variable anyway, as it might be
2422 used in different SSA names when the parameter is set in the function.
2424 Do the replacement at -O0 for const arguments replaced by a constant.
2425 This is important for builtin_constant_p and other constructs that
2426 require a constant argument to be visible in the inlined body. */
2427 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2428 && (optimize
2429 || (TREE_READONLY (p)
2430 && is_gimple_min_invariant (rhs)))
2431 && (TREE_CODE (rhs) == SSA_NAME
2432 || is_gimple_min_invariant (rhs))
2433 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2435 insert_decl_map (id, def, rhs);
2436 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2439 /* If the value of the argument is never used, don't bother
2440 initializing it. */
2441 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2443 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2444 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2447 /* Initialize this VAR_DECL from the equivalent argument. Convert
2448 the argument to the proper type in case it was promoted. */
2449 if (value)
2451 if (rhs == error_mark_node)
2453 insert_decl_map (id, p, var);
2454 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2457 STRIP_USELESS_TYPE_CONVERSION (rhs);
2459 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2460 keep our trees in gimple form. */
2461 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2463 def = remap_ssa_name (def, id);
2464 init_stmt = gimple_build_assign (def, rhs);
2465 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2466 set_default_def (var, NULL);
2468 else
2469 init_stmt = gimple_build_assign (var, rhs);
2471 if (bb && init_stmt)
2472 insert_init_stmt (id, bb, init_stmt);
2474 return init_stmt;
2477 /* Generate code to initialize the parameters of the function at the
2478 top of the stack in ID from the GIMPLE_CALL STMT. */
2480 static void
2481 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2482 tree fn, basic_block bb)
2484 tree parms;
2485 size_t i;
2486 tree p;
2487 tree vars = NULL_TREE;
2488 tree static_chain = gimple_call_chain (stmt);
2490 /* Figure out what the parameters are. */
2491 parms = DECL_ARGUMENTS (fn);
2493 /* Loop through the parameter declarations, replacing each with an
2494 equivalent VAR_DECL, appropriately initialized. */
2495 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2497 tree val;
2498 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2499 setup_one_parameter (id, p, val, fn, bb, &vars);
2502 /* Initialize the static chain. */
2503 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2504 gcc_assert (fn != current_function_decl);
2505 if (p)
2507 /* No static chain? Seems like a bug in tree-nested.c. */
2508 gcc_assert (static_chain);
2510 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2513 declare_inline_vars (id->block, vars);
2517 /* Declare a return variable to replace the RESULT_DECL for the
2518 function we are calling. An appropriate DECL_STMT is returned.
2519 The USE_STMT is filled to contain a use of the declaration to
2520 indicate the return value of the function.
2522 RETURN_SLOT, if non-null, is the place in which to store the result;
2523 it is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if
2524 non-null, was the LHS of the MODIFY_EXPR to which this call is the RHS.
2526 The return value is a (possibly null) value that is the result of the
2527 function as seen by the callee. *USE_P is a (possibly null) value that
2528 holds the result as seen by the caller. */
2530 static tree
2531 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2532 tree *use_p)
2534 tree callee = id->src_fn;
2535 tree caller = id->dst_fn;
2536 tree result = DECL_RESULT (callee);
2537 tree callee_type = TREE_TYPE (result);
2538 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
2539 tree var, use;
2541 /* We don't need to do anything for functions that don't return
2542 anything. */
2543 if (!result || VOID_TYPE_P (callee_type))
2545 *use_p = NULL_TREE;
2546 return NULL_TREE;
2549 /* If there was a return slot, then the return value is the
2550 dereferenced address of that object. */
2551 if (return_slot)
2553 /* The front end shouldn't have used both return_slot and
2554 a modify expression. */
2555 gcc_assert (!modify_dest);
2556 if (DECL_BY_REFERENCE (result))
2558 tree return_slot_addr = build_fold_addr_expr (return_slot);
2559 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2561 /* We are going to construct *&return_slot and we can't do that
2562 for variables believed to be not addressable.
2564 FIXME: This check can possibly trigger, because values returned
2565 via return slot optimization are not believed to have their
2566 address taken by alias analysis. */
2567 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2568 if (gimple_in_ssa_p (cfun))
2570 HOST_WIDE_INT bitsize;
2571 HOST_WIDE_INT bitpos;
2572 tree offset;
2573 enum machine_mode mode;
2574 int unsignedp;
2575 int volatilep;
2576 tree base;
2577 base = get_inner_reference (return_slot, &bitsize, &bitpos,
2578 &offset,
2579 &mode, &unsignedp, &volatilep,
2580 false);
2581 if (TREE_CODE (base) == INDIRECT_REF)
2582 base = TREE_OPERAND (base, 0);
2583 if (TREE_CODE (base) == SSA_NAME)
2584 base = SSA_NAME_VAR (base);
2585 mark_sym_for_renaming (base);
2587 var = return_slot_addr;
2589 else
2591 var = return_slot;
2592 gcc_assert (TREE_CODE (var) != SSA_NAME);
2593 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2595 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2596 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2597 && !DECL_GIMPLE_REG_P (result)
2598 && DECL_P (var))
2599 DECL_GIMPLE_REG_P (var) = 0;
2600 use = NULL;
2601 goto done;
2604 /* All types requiring non-trivial constructors should have been handled. */
2605 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2607 /* Attempt to avoid creating a new temporary variable. */
2608 if (modify_dest
2609 && TREE_CODE (modify_dest) != SSA_NAME)
2611 bool use_it = false;
2613 /* We can't use MODIFY_DEST if there's type promotion involved. */
2614 if (!useless_type_conversion_p (callee_type, caller_type))
2615 use_it = false;
2617 /* ??? If we're assigning to a variable sized type, then we must
2618 reuse the destination variable, because we've no good way to
2619 create variable sized temporaries at this point. */
2620 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2621 use_it = true;
2623 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2624 reuse it as the result of the call directly. Don't do this if
2625 it would promote MODIFY_DEST to addressable. */
2626 else if (TREE_ADDRESSABLE (result))
2627 use_it = false;
2628 else
2630 tree base_m = get_base_address (modify_dest);
2632 /* If the base isn't a decl, then it's a pointer, and we don't
2633 know where that's going to go. */
2634 if (!DECL_P (base_m))
2635 use_it = false;
2636 else if (is_global_var (base_m))
2637 use_it = false;
2638 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2639 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2640 && !DECL_GIMPLE_REG_P (result)
2641 && DECL_GIMPLE_REG_P (base_m))
2642 use_it = false;
2643 else if (!TREE_ADDRESSABLE (base_m))
2644 use_it = true;
2647 if (use_it)
2649 var = modify_dest;
2650 use = NULL;
2651 goto done;
2655 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2657 var = copy_result_decl_to_var (result, id);
2658 if (gimple_in_ssa_p (cfun))
2660 get_var_ann (var);
2661 add_referenced_var (var);
2664 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2665 DECL_STRUCT_FUNCTION (caller)->local_decls
2666 = tree_cons (NULL_TREE, var,
2667 DECL_STRUCT_FUNCTION (caller)->local_decls);
2669 /* Do not have the rest of GCC warn about this variable as it should
2670 not be visible to the user. */
2671 TREE_NO_WARNING (var) = 1;
2673 declare_inline_vars (id->block, var);
2675 /* Build the use expr. If the return type of the function was
2676 promoted, convert it back to the expected type. */
2677 use = var;
2678 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2679 use = fold_convert (caller_type, var);
2681 STRIP_USELESS_TYPE_CONVERSION (use);
2683 if (DECL_BY_REFERENCE (result))
2685 TREE_ADDRESSABLE (var) = 1;
2686 var = build_fold_addr_expr (var);
2689 done:
2690 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2691 way, when the RESULT_DECL is encountered, it will be
2692 automatically replaced by the VAR_DECL. */
2693 insert_decl_map (id, result, var);
2695 /* Remember this so we can ignore it in remap_decls. */
2696 id->retvar = var;
2698 *use_p = use;
2699 return var;
2702 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2703 to a local label. */
2705 static tree
2706 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2708 tree node = *nodep;
2709 tree fn = (tree) fnp;
2711 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2712 return node;
2714 if (TYPE_P (node))
2715 *walk_subtrees = 0;
2717 return NULL_TREE;
2720 /* Callback through walk_tree. Determine if we've got an aggregate
2721 type that we can't support; return non-null if so. */
2723 static tree
2724 cannot_copy_type_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
2725 void *data ATTRIBUTE_UNUSED)
2727 tree t, node = *nodep;
2729 if (TREE_CODE (node) == RECORD_TYPE || TREE_CODE (node) == UNION_TYPE)
2731 /* We cannot inline a function of the form
2733 void F (int i) { struct S { int ar[i]; } s; }
2735 Attempting to do so produces a catch-22.
2736 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
2737 UNION_TYPE nodes, then it goes into infinite recursion on a
2738 structure containing a pointer to its own type. If it doesn't,
2739 then the type node for S doesn't get adjusted properly when
2740 F is inlined.
2742 ??? This is likely no longer true, but it's too late in the 4.0
2743 cycle to try to find out. This should be checked for 4.1. */
2744 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
2745 if (variably_modified_type_p (TREE_TYPE (t), NULL))
2746 return node;
2749 return NULL_TREE;
2753 /* Determine if the function can be copied. If so, return NULL. If
2754 not, return a string describing the reason for failure. */
2756 static const char *
2757 copy_forbidden (struct function *fun, tree fndecl)
2759 const char *reason = fun->cannot_be_copied_reason;
2760 tree step;
2762 /* Only examine the function once. */
2763 if (fun->cannot_be_copied_set)
2764 return reason;
2766 /* We cannot copy a function that receives a non-local goto
2767 because we cannot remap the destination label used in the
2768 function that is performing the non-local goto. */
2769 /* ??? Actually, this should be possible, if we work at it.
2770 No doubt there's just a handful of places that simply
2771 assume it doesn't happen and don't substitute properly. */
2772 if (fun->has_nonlocal_label)
2774 reason = G_("function %q+F can never be copied "
2775 "because it receives a non-local goto");
2776 goto fail;
2779 for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2781 tree decl = TREE_VALUE (step);
2783 if (TREE_CODE (decl) == VAR_DECL
2784 && TREE_STATIC (decl)
2785 && !DECL_EXTERNAL (decl)
2786 && DECL_INITIAL (decl)
2787 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2788 has_label_address_in_static_1,
2789 fndecl))
2791 reason = G_("function %q+F can never be copied because it saves "
2792 "address of local label in a static variable");
2793 goto fail;
2796 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2797 && variably_modified_type_p (TREE_TYPE (decl), NULL)
2798 && walk_tree_without_duplicates (&TREE_TYPE (decl),
2799 cannot_copy_type_1, NULL))
2801 reason = G_("function %q+F can never be copied "
2802 "because it uses variable sized variables");
2803 goto fail;
2807 fail:
2808 fun->cannot_be_copied_reason = reason;
2809 fun->cannot_be_copied_set = true;
2810 return reason;
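/* If the last inline_forbidden_p check found the function uninlinable,
   this holds the (translatable) reason why. */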
2814 static const char *inline_forbidden_reason;
2816 /* A callback for walk_gimple_seq to handle statements. Returns non-NULL
2817 iff a function cannot be inlined. Also sets the reason why. */
2819 static tree
2820 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2821 struct walk_stmt_info *wip)
2823 tree fn = (tree) wip->info;
2824 tree t;
2825 gimple stmt = gsi_stmt (*gsi);
2827 switch (gimple_code (stmt))
2829 case GIMPLE_CALL:
2830 /* Refuse to inline an alloca call unless the user explicitly forced
2831 it, as this may change the program's memory overhead drastically
2832 when the function using alloca is called in a loop. In the copy of
2833 GCC included in SPEC2000, inlining into schedule_block caused it to
2834 require 2GB of RAM instead of 256MB. */
2835 if (gimple_alloca_call_p (stmt)
2836 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2838 inline_forbidden_reason
2839 = G_("function %q+F can never be inlined because it uses "
2840 "alloca (override using the always_inline attribute)");
2841 *handled_ops_p = true;
2842 return fn;
2845 t = gimple_call_fndecl (stmt);
2846 if (t == NULL_TREE)
2847 break;
2849 /* We cannot inline functions that call setjmp. */
2850 if (setjmp_call_p (t))
2852 inline_forbidden_reason
2853 = G_("function %q+F can never be inlined because it uses setjmp");
2854 *handled_ops_p = true;
2855 return t;
2858 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2859 switch (DECL_FUNCTION_CODE (t))
2861 /* We cannot inline functions that take a variable number of
2862 arguments. */
2863 case BUILT_IN_VA_START:
2864 case BUILT_IN_NEXT_ARG:
2865 case BUILT_IN_VA_END:
2866 inline_forbidden_reason
2867 = G_("function %q+F can never be inlined because it "
2868 "uses variable argument lists");
2869 *handled_ops_p = true;
2870 return t;
2872 case BUILT_IN_LONGJMP:
2873 /* We can't inline functions that call __builtin_longjmp at
2874 all. The non-local goto machinery really requires the
2875 destination be in a different function. If we allow the
2876 function calling __builtin_longjmp to be inlined into the
2877 function calling __builtin_setjmp, Things will Go Awry. */
2878 inline_forbidden_reason
2879 = G_("function %q+F can never be inlined because "
2880 "it uses setjmp-longjmp exception handling");
2881 *handled_ops_p = true;
2882 return t;
2884 case BUILT_IN_NONLOCAL_GOTO:
2885 /* Similarly. */
2886 inline_forbidden_reason
2887 = G_("function %q+F can never be inlined because "
2888 "it uses non-local goto");
2889 *handled_ops_p = true;
2890 return t;
2892 case BUILT_IN_RETURN:
2893 case BUILT_IN_APPLY_ARGS:
2894 /* If a __builtin_apply_args caller were inlined,
2895 it would be saving the arguments of the function it has
2896 been inlined into. Similarly, __builtin_return would
2897 return from the function into which it has been inlined. */
2898 inline_forbidden_reason
2899 = G_("function %q+F can never be inlined because "
2900 "it uses __builtin_return or __builtin_apply_args");
2901 *handled_ops_p = true;
2902 return t;
2904 default:
2905 break;
2907 break;
2909 case GIMPLE_GOTO:
2910 t = gimple_goto_dest (stmt);
2912 /* We will not inline a function which uses computed goto. The
2913 addresses of its local labels, which may be tucked into
2914 global storage, are of course not constant across
2915 instantiations, which causes unexpected behavior. */
2916 if (TREE_CODE (t) != LABEL_DECL)
2918 inline_forbidden_reason
2919 = G_("function %q+F can never be inlined "
2920 "because it contains a computed goto");
2921 *handled_ops_p = true;
2922 return t;
2924 break;
2926 default:
2927 break;
2930 *handled_ops_p = false;
2931 return NULL_TREE;
2934 /* Return true if FNDECL is a function that cannot be inlined into
2935 another one. */
2937 static bool
2938 inline_forbidden_p (tree fndecl)
2940 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
2941 struct walk_stmt_info wi;
2942 struct pointer_set_t *visited_nodes;
2943 basic_block bb;
2944 bool forbidden_p = false;
2946 /* First check for shared reasons not to copy the code. */
2947 inline_forbidden_reason = copy_forbidden (fun, fndecl);
2948 if (inline_forbidden_reason != NULL)
2949 return true;
2951 /* Next, walk the statements of the function looking for
2952 constructs we can't handle, or that are non-optimal for inlining. */
2953 visited_nodes = pointer_set_create ();
2954 memset (&wi, 0, sizeof (wi));
2955 wi.info = (void *) fndecl;
2956 wi.pset = visited_nodes;
2958 FOR_EACH_BB_FN (bb, fun)
2960 gimple ret;
2961 gimple_seq seq = bb_seq (bb);
2962 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
2963 forbidden_p = (ret != NULL);
2964 if (forbidden_p)
2965 break;
2968 pointer_set_destroy (visited_nodes);
2969 return forbidden_p;
2972 /* Returns nonzero if FN is a function that does not have any
2973 fundamental inline-blocking properties. */
2975 bool
2976 tree_inlinable_function_p (tree fn)
2978 bool inlinable = true;
2979 bool do_warning;
2980 tree always_inline;
2982 /* If we've already decided this function shouldn't be inlined,
2983 there's no need to check again. */
2984 if (DECL_UNINLINABLE (fn))
2985 return false;
2987 /* We only warn for functions declared `inline' by the user. */
2988 do_warning = (warn_inline
2989 && DECL_DECLARED_INLINE_P (fn)
2990 && !DECL_NO_INLINE_WARNING_P (fn)
2991 && !DECL_IN_SYSTEM_HEADER (fn));
2993 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
2995 if (flag_no_inline
2996 && always_inline == NULL)
2998 if (do_warning)
2999 warning (OPT_Winline, "function %q+F can never be inlined because it "
3000 "is suppressed using -fno-inline", fn);
3001 inlinable = false;
3004 /* Don't auto-inline anything that might not be bound within
3005 this unit of translation. */
3006 else if (!DECL_DECLARED_INLINE_P (fn)
3007 && DECL_REPLACEABLE_P (fn))
3008 inlinable = false;
3010 else if (!function_attribute_inlinable_p (fn))
3012 if (do_warning)
3013 warning (OPT_Winline, "function %q+F can never be inlined because it "
3014 "uses attributes conflicting with inlining", fn);
3015 inlinable = false;
3018 else if (inline_forbidden_p (fn))
3020 /* See if we should warn about uninlinable functions. Previously,
3021 some of these warnings would be issued while trying to expand
3022 the function inline, but that would cause multiple warnings
3023 about functions that would for example call alloca. But since
3024 this is a property of the function, just one warning is enough.
3025 As a bonus we can now give more details about the reason why a
3026 function is not inlinable. */
3027 if (always_inline)
3028 sorry (inline_forbidden_reason, fn);
3029 else if (do_warning)
3030 warning (OPT_Winline, inline_forbidden_reason, fn);
3032 inlinable = false;
3035 /* Squirrel away the result so that we don't have to check again. */
3036 DECL_UNINLINABLE (fn) = !inlinable;
3038 return inlinable;
3041 /* Estimate the cost of a memory move. Use the machine-dependent
3042 word size and take a possible memcpy call into account. */
3044 static int
3045 estimate_move_cost (tree type)
3047 HOST_WIDE_INT size;
3049 gcc_assert (!VOID_TYPE_P (type));
3051 size = int_size_in_bytes (type);
3053 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3054 /* Cost of a memcpy call, 3 arguments and the call. */
3055 return 4;
3056 else
3057 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3060 /* Returns the cost of operation CODE, according to WEIGHTS. */
3062 static int
3063 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3064 tree op1 ATTRIBUTE_UNUSED, tree op2)
3066 switch (code)
3068 /* These are "free" conversions, or their presumed cost
3069 is folded into other operations. */
3070 case RANGE_EXPR:
3071 CASE_CONVERT:
3072 case COMPLEX_EXPR:
3073 case PAREN_EXPR:
3074 return 0;
3076 /* Assign cost of 1 to usual operations.
3077 ??? We may consider mapping RTL costs to this. */
3078 case COND_EXPR:
3079 case VEC_COND_EXPR:
3081 case PLUS_EXPR:
3082 case POINTER_PLUS_EXPR:
3083 case MINUS_EXPR:
3084 case MULT_EXPR:
3086 case ADDR_SPACE_CONVERT_EXPR:
3087 case FIXED_CONVERT_EXPR:
3088 case FIX_TRUNC_EXPR:
3090 case NEGATE_EXPR:
3091 case FLOAT_EXPR:
3092 case MIN_EXPR:
3093 case MAX_EXPR:
3094 case ABS_EXPR:
3096 case LSHIFT_EXPR:
3097 case RSHIFT_EXPR:
3098 case LROTATE_EXPR:
3099 case RROTATE_EXPR:
3100 case VEC_LSHIFT_EXPR:
3101 case VEC_RSHIFT_EXPR:
3103 case BIT_IOR_EXPR:
3104 case BIT_XOR_EXPR:
3105 case BIT_AND_EXPR:
3106 case BIT_NOT_EXPR:
3108 case TRUTH_ANDIF_EXPR:
3109 case TRUTH_ORIF_EXPR:
3110 case TRUTH_AND_EXPR:
3111 case TRUTH_OR_EXPR:
3112 case TRUTH_XOR_EXPR:
3113 case TRUTH_NOT_EXPR:
3115 case LT_EXPR:
3116 case LE_EXPR:
3117 case GT_EXPR:
3118 case GE_EXPR:
3119 case EQ_EXPR:
3120 case NE_EXPR:
3121 case ORDERED_EXPR:
3122 case UNORDERED_EXPR:
3124 case UNLT_EXPR:
3125 case UNLE_EXPR:
3126 case UNGT_EXPR:
3127 case UNGE_EXPR:
3128 case UNEQ_EXPR:
3129 case LTGT_EXPR:
3131 case CONJ_EXPR:
3133 case PREDECREMENT_EXPR:
3134 case PREINCREMENT_EXPR:
3135 case POSTDECREMENT_EXPR:
3136 case POSTINCREMENT_EXPR:
3138 case REALIGN_LOAD_EXPR:
3140 case REDUC_MAX_EXPR:
3141 case REDUC_MIN_EXPR:
3142 case REDUC_PLUS_EXPR:
3143 case WIDEN_SUM_EXPR:
3144 case WIDEN_MULT_EXPR:
3145 case DOT_PROD_EXPR:
3147 case VEC_WIDEN_MULT_HI_EXPR:
3148 case VEC_WIDEN_MULT_LO_EXPR:
3149 case VEC_UNPACK_HI_EXPR:
3150 case VEC_UNPACK_LO_EXPR:
3151 case VEC_UNPACK_FLOAT_HI_EXPR:
3152 case VEC_UNPACK_FLOAT_LO_EXPR:
3153 case VEC_PACK_TRUNC_EXPR:
3154 case VEC_PACK_SAT_EXPR:
3155 case VEC_PACK_FIX_TRUNC_EXPR:
3156 case VEC_EXTRACT_EVEN_EXPR:
3157 case VEC_EXTRACT_ODD_EXPR:
3158 case VEC_INTERLEAVE_HIGH_EXPR:
3159 case VEC_INTERLEAVE_LOW_EXPR:
3161 return 1;
3163 /* A few special cases of expensive operations. This is useful
3164 for avoiding the inlining of functions having too many of these. */
3165 case TRUNC_DIV_EXPR:
3166 case CEIL_DIV_EXPR:
3167 case FLOOR_DIV_EXPR:
3168 case ROUND_DIV_EXPR:
3169 case EXACT_DIV_EXPR:
3170 case TRUNC_MOD_EXPR:
3171 case CEIL_MOD_EXPR:
3172 case FLOOR_MOD_EXPR:
3173 case ROUND_MOD_EXPR:
3174 case RDIV_EXPR:
3175 if (TREE_CODE (op2) != INTEGER_CST)
3176 return weights->div_mod_cost;
3177 return 1;
3179 default:
3180 /* We expect a copy assignment with no operator. */
3181 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3182 return 0;
3187 /* Estimate number of instructions that will be created by expanding
3188 the statements in the statement sequence STMTS.
3189 WEIGHTS contains weights attributed to various constructs. */
3191 static
3192 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3194 int cost;
3195 gimple_stmt_iterator gsi;
3197 cost = 0;
3198 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3199 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3201 return cost;
3205 /* Estimate number of instructions that will be created by expanding STMT.
3206 WEIGHTS contains weights attributed to various constructs. */
3208 int
3209 estimate_num_insns (gimple stmt, eni_weights *weights)
3211 unsigned cost, i;
3212 enum gimple_code code = gimple_code (stmt);
3213 tree lhs;
3214 tree rhs;
3216 switch (code)
3218 case GIMPLE_ASSIGN:
3219 /* Try to estimate the cost of assignments. We have two cases to
3220 deal with:
3221 1) Simple assignments to registers;
3222 2) Stores to things that must live in memory. This includes
3223 "normal" stores to scalars, but also assignments of large
3224 structures, or constructors of big arrays;
3226 Let us look at the first two cases, assuming we have "a = b + C":
3227 <GIMPLE_ASSIGN <var_decl "a">
3228 <plus_expr <var_decl "b"> <constant C>>
3229 If "a" is a GIMPLE register, the assignment to it is free on almost
3230 any target, because "a" usually ends up in a real register. Hence
3231 the only cost of this expression comes from the PLUS_EXPR, and we
3232 can ignore the GIMPLE_ASSIGN.
3233 If "a" is not a GIMPLE register, the assignment to "a" will most
3234 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3235 of moving something into "a", which we compute using the function
3236 estimate_move_cost. */
3237 lhs = gimple_assign_lhs (stmt);
3238 rhs = gimple_assign_rhs1 (stmt);
3240 if (is_gimple_reg (lhs))
3241 cost = 0;
3242 else
3243 cost = estimate_move_cost (TREE_TYPE (lhs));
3245 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3246 cost += estimate_move_cost (TREE_TYPE (rhs));
3248 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3249 gimple_assign_rhs1 (stmt),
3250 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3251 == GIMPLE_BINARY_RHS
3252 ? gimple_assign_rhs2 (stmt) : NULL);
3253 break;
3255 case GIMPLE_COND:
3256 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3257 gimple_op (stmt, 0),
3258 gimple_op (stmt, 1));
3259 break;
3261 case GIMPLE_SWITCH:
3262 /* Take into account the cost of the switch + guess 2 conditional
3263 jumps for each case label.
3265 TODO: once the switch expansion logic is sufficiently separated, we
3266 can do a better job of estimating the cost of the switch. */
3267 if (weights->time_based)
3268 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3269 else
3270 cost = gimple_switch_num_labels (stmt) * 2;
3271 break;
3273 case GIMPLE_CALL:
3275 tree decl = gimple_call_fndecl (stmt);
3276 tree addr = gimple_call_fn (stmt);
3277 tree funtype = TREE_TYPE (addr);
3279 if (POINTER_TYPE_P (funtype))
3280 funtype = TREE_TYPE (funtype);
3282 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3283 cost = weights->target_builtin_call_cost;
3284 else
3285 cost = weights->call_cost;
3287 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3288 switch (DECL_FUNCTION_CODE (decl))
3290 case BUILT_IN_CONSTANT_P:
3291 return 0;
3292 case BUILT_IN_EXPECT:
3293 return 0;
3295 /* Prefetch instruction is not expensive. */
3296 case BUILT_IN_PREFETCH:
3297 cost = weights->target_builtin_call_cost;
3298 break;
3300 default:
3301 break;
3304 if (decl)
3305 funtype = TREE_TYPE (decl);
3307 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3308 cost += estimate_move_cost (TREE_TYPE (funtype));
3309 /* Our cost must be kept in sync with
3310 cgraph_estimate_size_after_inlining, which uses the function
3311 declaration to figure out the arguments. */
3312 if (decl && DECL_ARGUMENTS (decl))
3314 tree arg;
3315 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3316 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3317 cost += estimate_move_cost (TREE_TYPE (arg));
3319 else if (funtype && prototype_p (funtype))
3321 tree t;
3322 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3323 t = TREE_CHAIN (t))
3324 if (!VOID_TYPE_P (TREE_VALUE (t)))
3325 cost += estimate_move_cost (TREE_VALUE (t));
3327 else
3329 for (i = 0; i < gimple_call_num_args (stmt); i++)
3331 tree arg = gimple_call_arg (stmt, i);
3332 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3333 cost += estimate_move_cost (TREE_TYPE (arg));
3337 break;
3340 case GIMPLE_GOTO:
3341 case GIMPLE_LABEL:
3342 case GIMPLE_NOP:
3343 case GIMPLE_PHI:
3344 case GIMPLE_RETURN:
3345 case GIMPLE_PREDICT:
3346 case GIMPLE_DEBUG:
3347 return 0;
3349 case GIMPLE_ASM:
3350 return asm_str_count (gimple_asm_string (stmt));
3352 case GIMPLE_RESX:
3353 /* This is either going to be an external function call with one
3354 argument, or two register copy statements plus a goto. */
3355 return 2;
3357 case GIMPLE_EH_DISPATCH:
3358 /* ??? This is going to turn into a switch statement. Ideally
3359 we'd have a look at the eh region and estimate the number of
3360 edges involved. */
3361 return 10;
3363 case GIMPLE_BIND:
3364 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3366 case GIMPLE_EH_FILTER:
3367 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3369 case GIMPLE_CATCH:
3370 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3372 case GIMPLE_TRY:
3373 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3374 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3376 /* OpenMP directives are generally very expensive. */
3378 case GIMPLE_OMP_RETURN:
3379 case GIMPLE_OMP_SECTIONS_SWITCH:
3380 case GIMPLE_OMP_ATOMIC_STORE:
3381 case GIMPLE_OMP_CONTINUE:
3382 /* ...except these, which are cheap. */
3383 return 0;
3385 case GIMPLE_OMP_ATOMIC_LOAD:
3386 return weights->omp_cost;
3388 case GIMPLE_OMP_FOR:
3389 return (weights->omp_cost
3390 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3391 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3393 case GIMPLE_OMP_PARALLEL:
3394 case GIMPLE_OMP_TASK:
3395 case GIMPLE_OMP_CRITICAL:
3396 case GIMPLE_OMP_MASTER:
3397 case GIMPLE_OMP_ORDERED:
3398 case GIMPLE_OMP_SECTION:
3399 case GIMPLE_OMP_SECTIONS:
3400 case GIMPLE_OMP_SINGLE:
3401 return (weights->omp_cost
3402 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3404 default:
3405 gcc_unreachable ();
3408 return cost;
3411 /* Estimate number of instructions that will be created by expanding
3412 function FNDECL. WEIGHTS contains weights attributed to various
3413 constructs. */
3415 int
3416 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3418 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3419 gimple_stmt_iterator bsi;
3420 basic_block bb;
3421 int n = 0;
3423 gcc_assert (my_function && my_function->cfg);
3424 FOR_EACH_BB_FN (bb, my_function)
3426 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3427 n += estimate_num_insns (gsi_stmt (bsi), weights);
3430 return n;
3434 /* Initializes weights used by estimate_num_insns. */
3436 void
3437 init_inline_once (void)
3439 eni_size_weights.call_cost = 1;
3440 eni_size_weights.target_builtin_call_cost = 1;
3441 eni_size_weights.div_mod_cost = 1;
3442 eni_size_weights.omp_cost = 40;
3443 eni_size_weights.time_based = false;
3445 /* Estimating time for call is difficult, since we have no idea what the
3446 called function does. In the current uses of eni_time_weights,
3447 underestimating the cost does less harm than overestimating it, so
3448 we choose a rather small value here. */
3449 eni_time_weights.call_cost = 10;
3450 eni_time_weights.target_builtin_call_cost = 10;
3451 eni_time_weights.div_mod_cost = 10;
3452 eni_time_weights.omp_cost = 40;
3453 eni_time_weights.time_based = true;
3456 /* Estimate the number of instructions in a gimple_seq. */
3458 int
3459 count_insns_seq (gimple_seq seq, eni_weights *weights)
3461 gimple_stmt_iterator gsi;
3462 int n = 0;
3463 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3464 n += estimate_num_insns (gsi_stmt (gsi), weights);
3466 return n;
3470 /* Install the new lexical TREE_BLOCK NEW_BLOCK underneath CURRENT_BLOCK. */
3472 static void
3473 prepend_lexical_block (tree current_block, tree new_block)
3475 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3476 BLOCK_SUBBLOCKS (current_block) = new_block;
3477 BLOCK_SUPERCONTEXT (new_block) = current_block;
3480 /* Fetch the callee declaration from the call graph edge going from NODE
3481 and associated with call statement STMT. Return NULL_TREE if not found. */
3482 static tree
3483 get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3485 struct cgraph_edge *cs;
3487 cs = cgraph_edge (node, stmt);
3488 if (cs)
3489 return cs->callee->decl;
3491 return NULL_TREE;
3494 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3496 static bool
3497 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3499 tree retvar, use_retvar;
3500 tree fn;
3501 struct pointer_map_t *st, *dst;
3502 tree return_slot;
3503 tree modify_dest;
3504 location_t saved_location;
3505 struct cgraph_edge *cg_edge;
3506 cgraph_inline_failed_t reason;
3507 basic_block return_block;
3508 edge e;
3509 gimple_stmt_iterator gsi, stmt_gsi;
3510 bool successfully_inlined = FALSE;
3511 bool purge_dead_abnormal_edges;
3512 tree t_step;
3513 tree var;
3515 /* Set input_location here so we get the right instantiation context
3516 if we call instantiate_decl from inlinable_function_p. */
3517 saved_location = input_location;
3518 if (gimple_has_location (stmt))
3519 input_location = gimple_location (stmt);
3521 /* From here on, we're only interested in CALL_EXPRs. */
3522 if (gimple_code (stmt) != GIMPLE_CALL)
3523 goto egress;
3525 /* First, see if we can figure out what function is being called.
3526 If we cannot, then there is no hope of inlining the function. */
3527 fn = gimple_call_fndecl (stmt);
3528 if (!fn)
3530 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3531 if (!fn)
3532 goto egress;
3535 /* Turn forward declarations into real ones. */
3536 fn = cgraph_node (fn)->decl;
3538 /* If FN is a declaration of a function in a nested scope that was
3539 globally declared inline, we don't set its DECL_INITIAL.
3540 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3541 C++ front-end uses it for cdtors to refer to their internal
3542 declarations, that are not real functions. Fortunately those
3543 don't have trees to be saved, so we can tell by checking their
3544 gimple_body. */
3545 if (!DECL_INITIAL (fn)
3546 && DECL_ABSTRACT_ORIGIN (fn)
3547 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3548 fn = DECL_ABSTRACT_ORIGIN (fn);
3550 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
3551 Kill this check once this is fixed. */
3552 if (!id->dst_node->analyzed)
3553 goto egress;
3555 cg_edge = cgraph_edge (id->dst_node, stmt);
3557 /* Don't inline functions with different EH personalities. */
3558 if (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3559 && DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)
3560 && (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3561 != DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)))
3562 goto egress;
3564 /* Don't try to inline functions that are not well-suited to
3565 inlining. */
3566 if (!cgraph_inline_p (cg_edge, &reason))
3568 /* If this call was originally indirect, we do not want to emit any
3569 inlining-related warnings or sorry messages, because there are no
3570 guarantees regarding those. */
3571 if (cg_edge->indirect_call)
3572 goto egress;
3574 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3575 /* Avoid warnings during early inline pass. */
3576 && cgraph_global_info_ready)
3578 sorry ("inlining failed in call to %q+F: %s", fn,
3579 cgraph_inline_failed_string (reason));
3580 sorry ("called from here");
3582 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3583 && !DECL_IN_SYSTEM_HEADER (fn)
3584 && reason != CIF_UNSPECIFIED
3585 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3586 /* Avoid warnings during early inline pass. */
3587 && cgraph_global_info_ready)
3589 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3590 fn, cgraph_inline_failed_string (reason));
3591 warning (OPT_Winline, "called from here");
3593 goto egress;
3595 fn = cg_edge->callee->decl;
3597 #ifdef ENABLE_CHECKING
3598 if (cg_edge->callee->decl != id->dst_node->decl)
3599 verify_cgraph_node (cg_edge->callee);
3600 #endif
3602 /* We will be inlining this callee. */
3603 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3605 /* Update the callers EH personality. */
3606 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3607 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3608 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3610 /* Split the block holding the GIMPLE_CALL. */
3611 e = split_block (bb, stmt);
3612 bb = e->src;
3613 return_block = e->dest;
3614 remove_edge (e);
3616 /* split_block splits after the statement; work around this by
3617 moving the call into the second block manually. Not pretty,
3618 but seems easier than doing the CFG manipulation by hand
3619 when the GIMPLE_CALL is in the last statement of BB. */
3620 stmt_gsi = gsi_last_bb (bb);
3621 gsi_remove (&stmt_gsi, false);
3623 /* If the GIMPLE_CALL was in the last statement of BB, it may have
3624 been the source of abnormal edges. In this case, schedule
3625 the removal of dead abnormal edges. */
3626 gsi = gsi_start_bb (return_block);
3627 if (gsi_end_p (gsi))
3629 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3630 purge_dead_abnormal_edges = true;
3632 else
3634 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3635 purge_dead_abnormal_edges = false;
3638 stmt_gsi = gsi_start_bb (return_block);
3640 /* Build a block containing code to initialize the arguments, the
3641 actual inline expansion of the body, and a label for the return
3642 statements within the function to jump to. The type of the
3643 statement expression is the return type of the function call. */
3644 id->block = make_node (BLOCK);
3645 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3646 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3647 prepend_lexical_block (gimple_block (stmt), id->block);
3649 /* Local declarations will be replaced by their equivalents in this
3650 map. */
3651 st = id->decl_map;
3652 id->decl_map = pointer_map_create ();
3653 dst = id->debug_map;
3654 id->debug_map = NULL;
3656 /* Record the function we are about to inline. */
3657 id->src_fn = fn;
3658 id->src_node = cg_edge->callee;
3659 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3660 id->gimple_call = stmt;
3662 gcc_assert (!id->src_cfun->after_inlining);
3664 id->entry_bb = bb;
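/* If the callee is marked cold, hint to the branch predictor that the
   inlined path is unlikely to be taken. */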
3665 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3667 gimple_stmt_iterator si = gsi_last_bb (bb);
3668 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3669 NOT_TAKEN),
3670 GSI_NEW_STMT);
3672 initialize_inlined_parameters (id, stmt, fn, bb);
3674 if (DECL_INITIAL (fn))
3675 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3677 /* Return statements in the function body will be replaced by jumps
3678 to the RET_LABEL. */
3679 gcc_assert (DECL_INITIAL (fn));
3680 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3682 /* Find the LHS to which the result of this call is assigned. */
3683 return_slot = NULL;
3684 if (gimple_call_lhs (stmt))
3686 modify_dest = gimple_call_lhs (stmt);
3688 /* The function which we are inlining might not return a value,
3689 in which case we should issue a warning that the function
3690 does not return a value. In that case the optimizers will
3691 see that the variable to which the value is assigned was not
3692 initialized. We do not want to issue a warning about that
3693 uninitialized variable. */
3694 if (DECL_P (modify_dest))
3695 TREE_NO_WARNING (modify_dest) = 1;
3697 if (gimple_call_return_slot_opt_p (stmt))
3699 return_slot = modify_dest;
3700 modify_dest = NULL;
3703 else
3704 modify_dest = NULL;
3706 /* If we are inlining a call to the C++ operator new, we don't want
3707 to use type based alias analysis on the return value. Otherwise
3708 we may get confused if the compiler sees that the inlined new
3709 function returns a pointer which was just deleted. See bug
3710 33407. */
3711 if (DECL_IS_OPERATOR_NEW (fn))
3713 return_slot = NULL;
3714 modify_dest = NULL;
3717 /* Declare the return variable for the function. */
3718 retvar = declare_return_variable (id, return_slot, modify_dest, &use_retvar);
3720 /* Add local vars in this inlined callee to caller. */
3721 t_step = id->src_cfun->local_decls;
3722 for (; t_step; t_step = TREE_CHAIN (t_step))
3724 var = TREE_VALUE (t_step);
3725 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3727 if (var_ann (var) && add_referenced_var (var))
3728 cfun->local_decls = tree_cons (NULL_TREE, var,
3729 cfun->local_decls);
3731 else if (!can_be_nonlocal (var, id))
3732 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3733 cfun->local_decls);
3736 /* This is it. Duplicate the callee body. Assume callee is
3737 pre-gimplified. Note that we must not alter the caller
3738 function in any way before this point, as this CALL_EXPR may be
3739 a self-referential call; if we're calling ourselves, we need to
3740 duplicate our body before altering anything. */
3741 copy_body (id, bb->count, bb->frequency, bb, return_block);
3743 /* Reset the escaped and callused solutions. */
3744 if (cfun->gimple_df)
3746 pt_solution_reset (&cfun->gimple_df->escaped);
3747 pt_solution_reset (&cfun->gimple_df->callused);
3750 /* Clean up. */
3751 if (id->debug_map)
3753 pointer_map_destroy (id->debug_map);
3754 id->debug_map = dst;
3756 pointer_map_destroy (id->decl_map);
3757 id->decl_map = st;
3759 /* Unlink the call's virtual operands before replacing it. */
3760 unlink_stmt_vdef (stmt);
3762 /* If the inlined function returns a result that we care about,
3763 substitute the GIMPLE_CALL with an assignment of the return
3764 variable to the LHS of the call. That is, if STMT was
3765 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3766 if (use_retvar && gimple_call_lhs (stmt))
3768 gimple old_stmt = stmt;
3769 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3770 gsi_replace (&stmt_gsi, stmt, false);
3771 if (gimple_in_ssa_p (cfun))
3772 mark_symbols_for_renaming (stmt);
3773 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3775 else
3777 /* Handle the case of inlining a function with no return
3778 statement, which causes the return value to become undefined. */
3779 if (gimple_call_lhs (stmt)
3780 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3782 tree name = gimple_call_lhs (stmt);
3783 tree var = SSA_NAME_VAR (name);
3784 tree def = gimple_default_def (cfun, var);
3786 if (def)
3788 /* The variable is used uninitialized elsewhere; make this
3789 name undefined as well, via a move from the default def. */
3790 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3791 gsi_replace (&stmt_gsi, stmt, true);
3793 else
3795 /* Otherwise make this variable undefined. */
3796 gsi_remove (&stmt_gsi, true);
3797 set_default_def (var, name);
3798 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3801 else
3802 gsi_remove (&stmt_gsi, true);
3805 if (purge_dead_abnormal_edges)
3806 gimple_purge_dead_abnormal_call_edges (return_block);
3808 /* If the value of the new expression is ignored, that's OK. We
3809 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3810 the equivalent inlined version either. */
3811 if (is_gimple_assign (stmt))
3813 gcc_assert (gimple_assign_single_p (stmt)
3814 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
3815 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3818 /* Output the inlining info for this abstract function, since it has been
3819 inlined. If we don't do this now, we can lose the information about the
3820 variables in the function when the blocks get blown away as soon as we
3821 remove the cgraph node. */
3822 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
3824 /* Update callgraph if needed. */
3825 cgraph_remove_node (cg_edge->callee);
3827 id->block = NULL_TREE;
3828 successfully_inlined = TRUE;
3830 egress:
3831 input_location = saved_location;
3832 return successfully_inlined;
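/* A sketch for illustration only; the names and numbers below are made
   up, not taken from this file. Given

     int callee (int x) { return x + 1; }
     ...
     a = callee (b);

   expand_call_inline conceptually rewrites the caller's GIMPLE to

     x.1 = b;               initialize_inlined_parameters
     retval.2 = x.1 + 1;    copied body; the RETURN became an assignment
     a = retval.2;          the GIMPLE_CALL replaced as described above

   The real pass creates its own temporaries, blocks and edges.  */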
3835 /* Expand call statements in basic block BB. Return true as soon
3836 as one call has been expanded inline. In GIMPLE a call always
3837 appears as a "toplevel" GIMPLE_CALL statement, possibly with an
3838 LHS, so a simple scan over BB's statements finds every candidate. */
3841 static bool
3842 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
3844 gimple_stmt_iterator gsi;
3846 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3848 gimple stmt = gsi_stmt (gsi);
3850 if (is_gimple_call (stmt)
3851 && expand_call_inline (bb, stmt, id))
3852 return true;
3855 return false;
3859 /* Walk all basic blocks created after FIRST and try to fold every statement
3860 in the STATEMENTS pointer set. */
3862 static void
3863 fold_marked_statements (int first, struct pointer_set_t *statements)
3865 for (; first < n_basic_blocks; first++)
3866 if (BASIC_BLOCK (first))
3868 gimple_stmt_iterator gsi;
3870 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
3871 !gsi_end_p (gsi);
3872 gsi_next (&gsi))
3873 if (pointer_set_contains (statements, gsi_stmt (gsi)))
3875 gimple old_stmt = gsi_stmt (gsi);
3876 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
3878 if (old_decl && DECL_BUILT_IN (old_decl))
3880 /* Folding builtins can create multiple instructions,
3881 we need to look at all of them. */
3882 gimple_stmt_iterator i2 = gsi;
3883 gsi_prev (&i2);
3884 if (fold_stmt (&gsi))
3886 gimple new_stmt;
3887 if (gsi_end_p (i2))
3888 i2 = gsi_start_bb (BASIC_BLOCK (first));
3889 else
3890 gsi_next (&i2);
3891 while (1)
3893 new_stmt = gsi_stmt (i2);
3894 update_stmt (new_stmt);
3895 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3896 new_stmt);
3898 if (new_stmt == gsi_stmt (gsi))
3900 /* It is okay to check only the very last of
3901 these statements. If it is still a throwing
3902 statement nothing will change; if it is not,
3903 this can remove EH edges. The only case where
3904 that would be wrong is when an intermediate
3905 statement throws but the last one does not;
3906 that would require splitting the block, which
3907 we cannot do here, and we would lose anyway.
3908 And as builtins probably never throw, this
3909 all is moot anyway. */
3910 if (maybe_clean_or_replace_eh_stmt (old_stmt,
3911 new_stmt))
3912 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3913 break;
3915 gsi_next (&i2);
3919 else if (fold_stmt (&gsi))
3921 /* Re-read the statement from GSI as fold_stmt() may
3922 have changed it. */
3923 gimple new_stmt = gsi_stmt (gsi);
3924 update_stmt (new_stmt);
3926 if (is_gimple_call (old_stmt)
3927 || is_gimple_call (new_stmt))
3928 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3929 new_stmt);
3931 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
3932 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
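/* Illustrative example (made up, not from this file): folding

     tmp = __builtin_strlen ("abc");

   replaces the GIMPLE_CALL with the assignment `tmp = 3', while other
   builtins may fold into several statements; the I2 loop above walks
   every statement the fold produced, updating operands and callgraph
   edges for each one.  */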
3938 /* Return true if BB has at least one abnormal outgoing edge. */
3940 static inline bool
3941 has_abnormal_outgoing_edge_p (basic_block bb)
3943 edge e;
3944 edge_iterator ei;
3946 FOR_EACH_EDGE (e, ei, bb->succs)
3947 if (e->flags & EDGE_ABNORMAL)
3948 return true;
3950 return false;
3953 /* Expand calls to inline functions in the body of FN. */
3955 unsigned int
3956 optimize_inline_calls (tree fn)
3958 copy_body_data id;
3959 tree prev_fn;
3960 basic_block bb;
3961 int last = n_basic_blocks;
3962 struct gimplify_ctx gctx;
3964 /* There is no point in performing inlining if errors have already
3965 occurred -- and we might crash if we try to inline invalid
3966 code. */
3967 if (errorcount || sorrycount)
3968 return 0;
3970 /* Clear out ID. */
3971 memset (&id, 0, sizeof (id));
3973 id.src_node = id.dst_node = cgraph_node (fn);
3974 id.dst_fn = fn;
3975 /* Or, if another function is mid-compilation, inline into it instead. */
3976 prev_fn = NULL_TREE;
3977 if (current_function_decl)
3979 id.dst_fn = current_function_decl;
3980 prev_fn = current_function_decl;
3983 id.copy_decl = copy_decl_maybe_to_var;
3984 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3985 id.transform_new_cfg = false;
3986 id.transform_return_to_modify = true;
3987 id.transform_lang_insert_block = NULL;
3988 id.statements_to_fold = pointer_set_create ();
3990 push_gimplify_context (&gctx);
3992 /* We make no attempts to keep dominance info up-to-date. */
3993 free_dominance_info (CDI_DOMINATORS);
3994 free_dominance_info (CDI_POST_DOMINATORS);
3996 /* Register specific gimple functions. */
3997 gimple_register_cfg_hooks ();
3999 /* Reach the trees by walking over the CFG, and note the
4000 enclosing basic-blocks in the call edges. */
4001 /* We walk the blocks going forward, because inlined function bodies
4002 will split id->current_basic_block, and the new blocks will
4003 follow it; we'll trudge through them, processing their CALL_EXPRs
4004 along the way. */
4005 FOR_EACH_BB (bb)
4006 gimple_expand_calls_inline (bb, &id);
4008 pop_gimplify_context (NULL);
4010 #ifdef ENABLE_CHECKING
4012 struct cgraph_edge *e;
4014 verify_cgraph_node (id.dst_node);
4016 /* Double check that we inlined everything we are supposed to inline. */
4017 for (e = id.dst_node->callees; e; e = e->next_callee)
4018 gcc_assert (e->inline_failed);
4020 #endif
4022 /* Fold the statements before compacting/renumbering the basic blocks. */
4023 fold_marked_statements (last, id.statements_to_fold);
4024 pointer_set_destroy (id.statements_to_fold);
4026 gcc_assert (!id.debug_stmts);
4028 /* Renumber the (code) basic_blocks consecutively. */
4029 compact_blocks ();
4030 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4031 number_blocks (fn);
4033 fold_cond_expr_cond ();
4034 delete_unreachable_blocks_update_callgraph (&id);
4035 #ifdef ENABLE_CHECKING
4036 verify_cgraph_node (id.dst_node);
4037 #endif
4039 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4040 not possible yet - the IPA passes might make various functions nothrow
4041 without bothering to proactively update the local EH info. That update
4042 is done later by the fixup_cfg pass, which also runs the verification. */
4043 return (TODO_update_ssa
4044 | TODO_cleanup_cfg
4045 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4046 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4049 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4051 tree
4052 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4054 enum tree_code code = TREE_CODE (*tp);
4055 enum tree_code_class cl = TREE_CODE_CLASS (code);
4057 /* We make copies of most nodes. */
4058 if (IS_EXPR_CODE_CLASS (cl)
4059 || code == TREE_LIST
4060 || code == TREE_VEC
4061 || code == TYPE_DECL
4062 || code == OMP_CLAUSE)
4064 /* Because the chain gets clobbered when we make a copy, we save it
4065 here. */
4066 tree chain = NULL_TREE, new_tree;
4068 chain = TREE_CHAIN (*tp);
4070 /* Copy the node. */
4071 new_tree = copy_node (*tp);
4073 /* Propagate mudflap marked-ness. */
4074 if (flag_mudflap && mf_marked_p (*tp))
4075 mf_mark (new_tree);
4077 *tp = new_tree;
4079 /* Now, restore the chain, if appropriate. That will cause
4080 walk_tree to walk into the chain as well. */
4081 if (code == PARM_DECL
4082 || code == TREE_LIST
4083 || code == OMP_CLAUSE)
4084 TREE_CHAIN (*tp) = chain;
4086 /* For now, we don't update BLOCKs when we make copies. So, we
4087 have to nullify all BIND_EXPRs. */
4088 if (TREE_CODE (*tp) == BIND_EXPR)
4089 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4091 else if (code == CONSTRUCTOR)
4093 /* CONSTRUCTOR nodes need special handling because
4094 we need to duplicate the vector of elements. */
4095 tree new_tree;
4097 new_tree = copy_node (*tp);
4099 /* Propagate mudflap marked-ness. */
4100 if (flag_mudflap && mf_marked_p (*tp))
4101 mf_mark (new_tree);
4103 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4104 CONSTRUCTOR_ELTS (*tp));
4105 *tp = new_tree;
4107 else if (TREE_CODE_CLASS (code) == tcc_type)
4108 *walk_subtrees = 0;
4109 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4110 *walk_subtrees = 0;
4111 else if (TREE_CODE_CLASS (code) == tcc_constant)
4112 *walk_subtrees = 0;
4113 else
4114 gcc_assert (code != STATEMENT_LIST);
4115 return NULL_TREE;
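/* A minimal usage sketch (illustrative only): paired with walk_tree,
   copy_tree_r performs a deep copy of an expression tree:

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);

   Types, constants and most declarations are shared rather than
   copied, as the *walk_subtrees = 0 cases above show.  */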
4118 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4119 information indicating to what new SAVE_EXPR this one should be mapped,
4120 use that one. Otherwise, create a new node and enter it in ST so
4121 that later encounters of the same SAVE_EXPR map to the new node. */
4123 static void
4124 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4126 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4127 tree *n;
4128 tree t;
4130 /* See if we already encountered this SAVE_EXPR. */
4131 n = (tree *) pointer_map_contains (st, *tp);
4133 /* If we didn't already remap this SAVE_EXPR, do so now. */
4134 if (!n)
4136 t = copy_node (*tp);
4138 /* Remember this SAVE_EXPR. */
4139 *pointer_map_insert (st, *tp) = t;
4140 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4141 *pointer_map_insert (st, t) = t;
4143 else
4145 /* We've already walked into this SAVE_EXPR; don't do it again. */
4146 *walk_subtrees = 0;
4147 t = *n;
4150 /* Replace this SAVE_EXPR with the copy. */
4151 *tp = t;
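/* Illustrative example (made up): in

     SAVE_EXPR <x + 1> * SAVE_EXPR <x + 1>

   both operands are the same node and must still be the same node
   after copying, or the side effect would be evaluated twice.  The
   double map insertion above (*tp -> t and t -> t) guarantees that a
   later visit, whether through the original or through the copy,
   finds the one remapped node.  */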
4154 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
4155 copies the label's declaration and enters it in the decl map in DATA
4156 (which is really a `copy_body_data *'). */
4158 static tree
4159 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4160 void *data)
4162 copy_body_data *id = (copy_body_data *) data;
4164 /* Don't walk into types. */
4165 if (TYPE_P (*tp))
4166 *walk_subtrees = 0;
4168 else if (TREE_CODE (*tp) == LABEL_EXPR)
4170 tree decl = TREE_OPERAND (*tp, 0);
4172 /* Copy the decl and remember the copy. */
4173 insert_decl_map (id, decl, id->copy_decl (decl, id));
4176 return NULL_TREE;
4179 /* Perform any modifications to EXPR required when it is unsaved. Does
4180 not recurse into EXPR's subtrees. */
4182 static void
4183 unsave_expr_1 (tree expr)
4185 switch (TREE_CODE (expr))
4187 case TARGET_EXPR:
4188 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4189 It's OK for this to happen if it was part of a subtree that
4190 isn't immediately expanded, such as operand 2 of another
4191 TARGET_EXPR. */
4192 if (TREE_OPERAND (expr, 1))
4193 break;
4195 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4196 TREE_OPERAND (expr, 3) = NULL_TREE;
4197 break;
4199 default:
4200 break;
4204 /* Called via walk_tree when an expression is unsaved. Using the
4205 decl map of the `copy_body_data' passed in DATA, remaps all
4206 local declarations to appropriate replacements. */
4208 static tree
4209 unsave_r (tree *tp, int *walk_subtrees, void *data)
4211 copy_body_data *id = (copy_body_data *) data;
4212 struct pointer_map_t *st = id->decl_map;
4213 tree *n;
4215 /* Only a local declaration (variable or label). */
4216 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4217 || TREE_CODE (*tp) == LABEL_DECL)
4219 /* Lookup the declaration. */
4220 n = (tree *) pointer_map_contains (st, *tp);
4222 /* If it's there, remap it. */
4223 if (n)
4224 *tp = *n;
4227 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4228 gcc_unreachable ();
4229 else if (TREE_CODE (*tp) == BIND_EXPR)
4230 copy_bind_expr (tp, walk_subtrees, id);
4231 else if (TREE_CODE (*tp) == SAVE_EXPR
4232 || TREE_CODE (*tp) == TARGET_EXPR)
4233 remap_save_expr (tp, st, walk_subtrees);
4234 else
4236 copy_tree_r (tp, walk_subtrees, NULL);
4238 /* Do whatever unsaving is required. */
4239 unsave_expr_1 (*tp);
4242 /* Keep iterating. */
4243 return NULL_TREE;
4246 /* Copies everything in EXPR and replaces variables, labels
4247 and SAVE_EXPRs local to EXPR. */
4249 tree
4250 unsave_expr_now (tree expr)
4252 copy_body_data id;
4254 /* There's nothing to do for NULL_TREE. */
4255 if (expr == 0)
4256 return expr;
4258 /* Set up ID. */
4259 memset (&id, 0, sizeof (id));
4260 id.src_fn = current_function_decl;
4261 id.dst_fn = current_function_decl;
4262 id.decl_map = pointer_map_create ();
4263 id.debug_map = NULL;
4265 id.copy_decl = copy_decl_no_change;
4266 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4267 id.transform_new_cfg = false;
4268 id.transform_return_to_modify = false;
4269 id.transform_lang_insert_block = NULL;
4271 /* Walk the tree once to find local labels. */
4272 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4274 /* Walk the tree again, copying, remapping, and unsaving. */
4275 walk_tree (&expr, unsave_r, &id, NULL);
4277 /* Clean up. */
4278 pointer_map_destroy (id.decl_map);
4279 if (id.debug_map)
4280 pointer_map_destroy (id.debug_map);
4282 return expr;
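/* A usage sketch (illustrative only):

     tree copy = unsave_expr_now (expr);

   yields a version of EXPR whose local variables, labels and
   SAVE_EXPRs are private copies, so the copy can be expanded
   independently of the original.  */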
4285 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4286 label, copies the declaration and enters it in the decl map in WI->info
4287 (which is really a `copy_body_data *'). */
4289 static tree
4290 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4291 bool *handled_ops_p ATTRIBUTE_UNUSED,
4292 struct walk_stmt_info *wi)
4294 copy_body_data *id = (copy_body_data *) wi->info;
4295 gimple stmt = gsi_stmt (*gsip);
4297 if (gimple_code (stmt) == GIMPLE_LABEL)
4299 tree decl = gimple_label_label (stmt);
4301 /* Copy the decl and remember the copy. */
4302 insert_decl_map (id, decl, id->copy_decl (decl, id));
4305 return NULL_TREE;
4309 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4310 Using the decl map of the `copy_body_data' found through WI,
4311 remaps all local declarations to appropriate replacements in gimple
4312 operands. */
4314 static tree
4315 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4317 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4318 copy_body_data *id = (copy_body_data *) wi->info;
4319 struct pointer_map_t *st = id->decl_map;
4320 tree *n;
4321 tree expr = *tp;
4323 /* Only a local declaration (variable or label). */
4324 if ((TREE_CODE (expr) == VAR_DECL
4325 && !TREE_STATIC (expr))
4326 || TREE_CODE (expr) == LABEL_DECL)
4328 /* Lookup the declaration. */
4329 n = (tree *) pointer_map_contains (st, expr);
4331 /* If it's there, remap it. */
4332 if (n)
4333 *tp = *n;
4334 *walk_subtrees = 0;
4336 else if (TREE_CODE (expr) == STATEMENT_LIST
4337 || TREE_CODE (expr) == BIND_EXPR
4338 || TREE_CODE (expr) == SAVE_EXPR)
4339 gcc_unreachable ();
4340 else if (TREE_CODE (expr) == TARGET_EXPR)
4342 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4343 It's OK for this to happen if it was part of a subtree that
4344 isn't immediately expanded, such as operand 2 of another
4345 TARGET_EXPR. */
4346 if (!TREE_OPERAND (expr, 1))
4348 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4349 TREE_OPERAND (expr, 3) = NULL_TREE;
4353 /* Keep iterating. */
4354 return NULL_TREE;
4358 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4359 Using the decl map of the `copy_body_data' found through WI,
4360 remaps all local declarations to appropriate replacements in gimple
4361 statements. */
4363 static tree
4364 replace_locals_stmt (gimple_stmt_iterator *gsip,
4365 bool *handled_ops_p ATTRIBUTE_UNUSED,
4366 struct walk_stmt_info *wi)
4368 copy_body_data *id = (copy_body_data *) wi->info;
4369 gimple stmt = gsi_stmt (*gsip);
4371 if (gimple_code (stmt) == GIMPLE_BIND)
4373 tree block = gimple_bind_block (stmt);
4375 if (block)
4377 remap_block (&block, id);
4378 gimple_bind_set_block (stmt, block);
4381 /* This will remap a lot of the same decls again, but this should be
4382 harmless. */
4383 if (gimple_bind_vars (stmt))
4384 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4387 /* Keep iterating. */
4388 return NULL_TREE;
4392 /* Copies everything in SEQ and replaces variables and labels local to
4393 current_function_decl. */
4395 gimple_seq
4396 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4398 copy_body_data id;
4399 struct walk_stmt_info wi;
4400 struct pointer_set_t *visited;
4401 gimple_seq copy;
4403 /* There's nothing to do for a NULL sequence. */
4404 if (seq == NULL)
4405 return seq;
4407 /* Set up ID. */
4408 memset (&id, 0, sizeof (id));
4409 id.src_fn = current_function_decl;
4410 id.dst_fn = current_function_decl;
4411 id.decl_map = pointer_map_create ();
4412 id.debug_map = NULL;
4414 id.copy_decl = copy_decl_no_change;
4415 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4416 id.transform_new_cfg = false;
4417 id.transform_return_to_modify = false;
4418 id.transform_lang_insert_block = NULL;
4420 /* Walk the tree once to find local labels. */
4421 memset (&wi, 0, sizeof (wi));
4422 visited = pointer_set_create ();
4423 wi.info = &id;
4424 wi.pset = visited;
4425 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4426 pointer_set_destroy (visited);
4428 copy = gimple_seq_copy (seq);
4430 /* Walk the copy, remapping decls. */
4431 memset (&wi, 0, sizeof (wi));
4432 wi.info = &id;
4433 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4435 /* Clean up. */
4436 pointer_map_destroy (id.decl_map);
4437 if (id.debug_map)
4438 pointer_map_destroy (id.debug_map);
4440 return copy;
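/* A usage sketch (illustrative only); this is the GIMPLE counterpart
   of unsave_expr_now above:

     gimple_seq copy = copy_gimple_seq_and_replace_locals (seq);

   The copy can then be inserted elsewhere without the two instances
   sharing local decls or labels.  */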
4444 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4446 static tree
4447 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4449 if (*tp == data)
4450 return (tree) data;
4451 else
4452 return NULL;
4455 bool
4456 debug_find_tree (tree top, tree search)
4458 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4462 /* Declare the variables created by the inliner. Add all the variables in
4463 VARS to BLOCK. */
4465 static void
4466 declare_inline_vars (tree block, tree vars)
4468 tree t;
4469 for (t = vars; t; t = TREE_CHAIN (t))
4471 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4472 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4473 cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
4476 if (block)
4477 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4480 /* Finish up COPY, a copy of DECL. The DECL originally lived in ID->src_fn,
4481 but the copy will live in ID->dst_fn. Performs the bookkeeping that is
4482 common to all of the copy_decl_* variants below. */
4484 static tree
4485 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4487 /* Don't generate debug information for the copy if we wouldn't have
4488 generated it for the original either. */
4489 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4490 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4492 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4493 declaration inspired this copy. */
4494 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4496 /* The new variable/label has no RTL, yet. */
4497 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4498 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4499 SET_DECL_RTL (copy, NULL_RTX);
4501 /* These args would always appear unused, if not for this. */
4502 TREE_USED (copy) = 1;
4504 /* Set the context for the new declaration. */
4505 if (!DECL_CONTEXT (decl))
4506 /* Globals stay global. */
4508 else if (DECL_CONTEXT (decl) != id->src_fn)
4509 /* Things that weren't in the scope of the function we're inlining
4510 from aren't in the scope we're inlining to, either. */
4512 else if (TREE_STATIC (decl))
4513 /* Function-scoped static variables should stay in the original
4514 function. */
4516 else
4517 /* Ordinary automatic local variables are now in the scope of the
4518 new function. */
4519 DECL_CONTEXT (copy) = id->dst_fn;
4521 return copy;
4524 static tree
4525 copy_decl_to_var (tree decl, copy_body_data *id)
4527 tree copy, type;
4529 gcc_assert (TREE_CODE (decl) == PARM_DECL
4530 || TREE_CODE (decl) == RESULT_DECL);
4532 type = TREE_TYPE (decl);
4534 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4535 VAR_DECL, DECL_NAME (decl), type);
4536 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4537 TREE_READONLY (copy) = TREE_READONLY (decl);
4538 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4539 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4541 return copy_decl_for_dup_finish (id, decl, copy);
4544 /* Like copy_decl_to_var, but create a return slot object instead of a
4545 pointer variable for return by invisible reference. */
4547 static tree
4548 copy_result_decl_to_var (tree decl, copy_body_data *id)
4550 tree copy, type;
4552 gcc_assert (TREE_CODE (decl) == PARM_DECL
4553 || TREE_CODE (decl) == RESULT_DECL);
4555 type = TREE_TYPE (decl);
4556 if (DECL_BY_REFERENCE (decl))
4557 type = TREE_TYPE (type);
4559 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4560 VAR_DECL, DECL_NAME (decl), type);
4561 TREE_READONLY (copy) = TREE_READONLY (decl);
4562 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4563 if (!DECL_BY_REFERENCE (decl))
4565 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4566 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4569 return copy_decl_for_dup_finish (id, decl, copy);
4572 tree
4573 copy_decl_no_change (tree decl, copy_body_data *id)
4575 tree copy;
4577 copy = copy_node (decl);
4579 /* The COPY is not abstract; it will be generated in DST_FN. */
4580 DECL_ABSTRACT (copy) = 0;
4581 lang_hooks.dup_lang_specific_decl (copy);
4583 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4584 been taken; it's for internal bookkeeping in expand_goto_internal. */
4585 if (TREE_CODE (copy) == LABEL_DECL)
4587 TREE_ADDRESSABLE (copy) = 0;
4588 LABEL_DECL_UID (copy) = -1;
4591 return copy_decl_for_dup_finish (id, decl, copy);
4594 static tree
4595 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4597 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4598 return copy_decl_to_var (decl, id);
4599 else
4600 return copy_decl_no_change (decl, id);
4603 /* Return a copy of the function's argument tree, omitting parameters whose indices are set in ARGS_TO_SKIP. */
4604 static tree
4605 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4606 bitmap args_to_skip, tree *vars)
4608 tree arg, *parg;
4609 tree new_parm = NULL;
4610 int i = 0;
4612 parg = &new_parm;
4614 for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4615 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4617 tree new_tree = remap_decl (arg, id);
4618 lang_hooks.dup_lang_specific_decl (new_tree);
4619 *parg = new_tree;
4620 parg = &TREE_CHAIN (new_tree);
4622 else if (!pointer_map_contains (id->decl_map, arg))
4624 /* Make an equivalent VAR_DECL. If the argument was used
4625 as a temporary variable later in the function, the uses
4626 will be replaced by this local variable. */
4627 tree var = copy_decl_to_var (arg, id);
4628 get_var_ann (var);
4629 add_referenced_var (var);
4630 insert_decl_map (id, arg, var);
4631 /* Declare this new variable. */
4632 TREE_CHAIN (var) = *vars;
4633 *vars = var;
4635 return new_parm;
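/* Illustrative example (hypothetical): versioning

     int f (int a, int b)

   with bit 1 set in ARGS_TO_SKIP produces a clone taking only A.  B is
   not copied as a PARM_DECL; instead, if the body still mentions it,
   the map entry created above replaces those uses with the local
   VAR_DECL chained onto *VARS.  */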
4638 /* Return a copy of the function's static chain. */
4639 static tree
4640 copy_static_chain (tree static_chain, copy_body_data * id)
4642 tree *chain_copy, *pvar;
4644 chain_copy = &static_chain;
4645 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4647 tree new_tree = remap_decl (*pvar, id);
4648 lang_hooks.dup_lang_specific_decl (new_tree);
4649 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4650 *pvar = new_tree;
4652 return static_chain;
4655 /* Return true if the function is allowed to be versioned.
4656 This is a guard for the versioning functionality. */
4658 bool
4659 tree_versionable_function_p (tree fndecl)
4661 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4662 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
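/* For example (illustrative), a function declared

     int f (int) __attribute__ ((noclone));

   is rejected here, as is any function for which copy_forbidden finds
   a reason, for instance a body that receives a non-local goto.  */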
4665 /* Delete all unreachable basic blocks and update the callgraph.
4666 Doing so is somewhat nontrivial because we need to update all clones and
4667 remove inline functions that become unreachable. */
4669 static bool
4670 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4672 bool changed = false;
4673 basic_block b, next_bb;
4675 find_unreachable_blocks ();
4677 /* Delete all unreachable basic blocks. */
4679 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4681 next_bb = b->next_bb;
4683 if (!(b->flags & BB_REACHABLE))
4685 gimple_stmt_iterator bsi;
4687 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4688 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4690 struct cgraph_edge *e;
4691 struct cgraph_node *node;
4693 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4695 if (!e->inline_failed)
4696 cgraph_remove_node_and_inline_clones (e->callee);
4697 else
4698 cgraph_remove_edge (e);
4700 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4701 && id->dst_node->clones)
4702 for (node = id->dst_node->clones; node != id->dst_node;)
4704 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4706 if (!e->inline_failed)
4707 cgraph_remove_node_and_inline_clones (e->callee);
4708 else
4709 cgraph_remove_edge (e);
4712 if (node->clones)
4713 node = node->clones;
4714 else if (node->next_sibling_clone)
4715 node = node->next_sibling_clone;
4716 else
4718 while (node != id->dst_node && !node->next_sibling_clone)
4719 node = node->clone_of;
4720 if (node != id->dst_node)
4721 node = node->next_sibling_clone;
4725 delete_basic_block (b);
4726 changed = true;
4730 if (changed)
4731 tidy_fallthru_edges ();
4732 #ifdef ENABLE_CHECKING
4733 verify_cgraph_node (id->dst_node);
4734 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4735 && id->dst_node->clones)
4737 struct cgraph_node *node;
4738 for (node = id->dst_node->clones; node != id->dst_node;)
4740 verify_cgraph_node (node);
4742 if (node->clones)
4743 node = node->clones;
4744 else if (node->next_sibling_clone)
4745 node = node->next_sibling_clone;
4746 else
4748 while (node != id->dst_node && !node->next_sibling_clone)
4749 node = node->clone_of;
4750 if (node != id->dst_node)
4751 node = node->next_sibling_clone;
4755 #endif
4756 return changed;
4759 /* Update clone info after duplication. */
4761 static void
4762 update_clone_info (copy_body_data * id)
4764 struct cgraph_node *node;
4765 if (!id->dst_node->clones)
4766 return;
4767 for (node = id->dst_node->clones; node != id->dst_node;)
4769 /* First update replace maps to match the new body. */
4770 if (node->clone.tree_map)
4772 unsigned int i;
4773 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4775 struct ipa_replace_map *replace_info;
4776 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4777 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4778 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4781 if (node->clones)
4782 node = node->clones;
4783 else if (node->next_sibling_clone)
4784 node = node->next_sibling_clone;
4785 else
4787 while (node != id->dst_node && !node->next_sibling_clone)
4788 node = node->clone_of;
4789 if (node != id->dst_node)
4790 node = node->next_sibling_clone;
4795 /* Create a copy of a function's tree.
4796 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes of the original
4797 function and the new copied function respectively. If we want to
4798 replace a DECL tree with another tree while duplicating the
4799 function's body, TREE_MAP represents the mapping between these
4800 trees. If UPDATE_CLONES is set, the call_stmt fields of edges of
4801 clones of the function will be updated. Parameters whose positions
4802 are set in ARGS_TO_SKIP are omitted from the copy's argument list. */
4803 void
4804 tree_function_versioning (tree old_decl, tree new_decl,
4805 VEC(ipa_replace_map_p,gc)* tree_map,
4806 bool update_clones, bitmap args_to_skip)
4808 struct cgraph_node *old_version_node;
4809 struct cgraph_node *new_version_node;
4810 copy_body_data id;
4811 tree p;
4812 unsigned i;
4813 struct ipa_replace_map *replace_info;
4814 basic_block old_entry_block, bb;
4815 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4817 tree t_step;
4818 tree old_current_function_decl = current_function_decl;
4819 tree vars = NULL_TREE;
4821 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4822 && TREE_CODE (new_decl) == FUNCTION_DECL);
4823 DECL_POSSIBLY_INLINED (old_decl) = 1;
4825 old_version_node = cgraph_node (old_decl);
4826 new_version_node = cgraph_node (new_decl);
4828 /* Output the inlining info for this abstract function, since it has been
4829 inlined. If we don't do this now, we can lose the information about the
4830 variables in the function when the blocks get blown away as soon as we
4831 remove the cgraph node. */
4832 (*debug_hooks->outlining_inline_function) (old_decl);
4834 DECL_ARTIFICIAL (new_decl) = 1;
4835 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
4836 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
4838 /* Prepare the data structures for the tree copy. */
4839 memset (&id, 0, sizeof (id));
4841 /* Collect statements that will need folding after the body is copied. */
4842 id.statements_to_fold = pointer_set_create ();
4844 id.decl_map = pointer_map_create ();
4845 id.debug_map = NULL;
4846 id.src_fn = old_decl;
4847 id.dst_fn = new_decl;
4848 id.src_node = old_version_node;
4849 id.dst_node = new_version_node;
4850 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
4852 id.copy_decl = copy_decl_no_change;
4853 id.transform_call_graph_edges
4854 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
4855 id.transform_new_cfg = true;
4856 id.transform_return_to_modify = false;
4857 id.transform_lang_insert_block = NULL;
4859 current_function_decl = new_decl;
4860 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
4861 (DECL_STRUCT_FUNCTION (old_decl));
4862 initialize_cfun (new_decl, old_decl,
4863 old_entry_block->count,
4864 old_entry_block->frequency);
4865 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
4867 /* Copy the function's static chain. */
4868 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
4869 if (p)
4870 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
4871 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
4872 &id);
4874 /* If there's a tree_map, prepare for substitution. */
4875 if (tree_map)
4876 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
4878 gimple init;
4879 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
4880 if (replace_info->replace_p)
4882 tree op = replace_info->new_tree;
4884 STRIP_NOPS (op);
4886 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
4887 op = TREE_OPERAND (op, 0);
4889 if (TREE_CODE (op) == ADDR_EXPR)
4891 op = TREE_OPERAND (op, 0);
4892 while (handled_component_p (op))
4893 op = TREE_OPERAND (op, 0);
4894 if (TREE_CODE (op) == VAR_DECL)
4895 add_referenced_var (op);
4897 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
4898 init = setup_one_parameter (&id, replace_info->old_tree,
4899 replace_info->new_tree, id.src_fn,
4900 NULL,
4901 &vars);
4902 if (init)
4903 VEC_safe_push (gimple, heap, init_stmts, init);
4906 /* Copy the function's arguments. */
4907 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
4908 DECL_ARGUMENTS (new_decl) =
4909 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
4910 args_to_skip, &vars);
4912 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
4914 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4915 number_blocks (id.dst_fn);
4917 declare_inline_vars (DECL_INITIAL (new_decl), vars);
4919 if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
4920 /* Add local vars. */
4921 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
4922 t_step; t_step = TREE_CHAIN (t_step))
4924 tree var = TREE_VALUE (t_step);
4925 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
4926 cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
4927 else if (!can_be_nonlocal (var, &id))
4928 cfun->local_decls =
4929 tree_cons (NULL_TREE, remap_decl (var, &id),
4930 cfun->local_decls);
4933 /* Copy the function's body. */
4934 copy_body (&id, old_entry_block->count, old_entry_block->frequency,
4935 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
4937 if (DECL_RESULT (old_decl) != NULL_TREE)
4939 tree *res_decl = &DECL_RESULT (old_decl);
4940 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
4941 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
4944 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4945 number_blocks (new_decl);
4947 /* We want to create the BB unconditionally, so that the addition of
4948 debug stmts doesn't affect BB count, which may in the end cause
4949 codegen differences. */
4950 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
4951 while (VEC_length (gimple, init_stmts))
4952 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
4953 update_clone_info (&id);
4955 /* Remap the nonlocal_goto_save_area, if any. */
4956 if (cfun->nonlocal_goto_save_area)
4958 struct walk_stmt_info wi;
4960 memset (&wi, 0, sizeof (wi));
4961 wi.info = &id;
4962 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
4965 /* Clean up. */
4966 pointer_map_destroy (id.decl_map);
4967 if (id.debug_map)
4968 pointer_map_destroy (id.debug_map);
4969 free_dominance_info (CDI_DOMINATORS);
4970 free_dominance_info (CDI_POST_DOMINATORS);
4972 fold_marked_statements (0, id.statements_to_fold);
4973 pointer_set_destroy (id.statements_to_fold);
4974 fold_cond_expr_cond ();
4975 delete_unreachable_blocks_update_callgraph (&id);
4976 update_ssa (TODO_update_ssa);
4977 free_dominance_info (CDI_DOMINATORS);
4978 free_dominance_info (CDI_POST_DOMINATORS);
4980 gcc_assert (!id.debug_stmts);
4981 VEC_free (gimple, heap, init_stmts);
4982 pop_cfun ();
4983 current_function_decl = old_current_function_decl;
4984 gcc_assert (!current_function_decl
4985 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
4986 return;
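/* Illustrative example (hypothetical): an IPA pass may version

     int f (int x) { return x * 2; }

   with a TREE_MAP entry replacing the PARM_DECL X by the constant 4.
   setup_one_parameter then yields an init statement, inserted above
   into the block split off the entry edge, and later folding can
   reduce the copied body to `return 8'.  */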
4989 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
4990 the callee and return the inlined body on success, or NULL_TREE on failure. */
4992 tree
4993 maybe_inline_call_in_expr (tree exp)
4995 tree fn = get_callee_fndecl (exp);
4997 /* We can only try to inline "const" functions. */
4998 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5000 struct pointer_map_t *decl_map = pointer_map_create ();
5001 call_expr_arg_iterator iter;
5002 copy_body_data id;
5003 tree param, arg, t;
5005 /* Remap the parameters. */
5006 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5007 param;
5008 param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
5009 *pointer_map_insert (decl_map, param) = arg;
5011 memset (&id, 0, sizeof (id));
5012 id.src_fn = fn;
5013 id.dst_fn = current_function_decl;
5014 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5015 id.decl_map = decl_map;
5017 id.copy_decl = copy_decl_no_change;
5018 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5019 id.transform_new_cfg = false;
5020 id.transform_return_to_modify = true;
5021 id.transform_lang_insert_block = NULL;
5023 /* Make sure not to unshare trees behind the front-end's back
5024 since front-end specific mechanisms may rely on sharing. */
5025 id.regimplify = false;
5026 id.do_not_unshare = true;
5028 /* We're not inside any EH region. */
5029 id.eh_lp_nr = 0;
5031 t = copy_tree_body (&id);
5032 pointer_map_destroy (decl_map);
5034 /* We can only return something suitable for use in a GENERIC
5035 expression tree. */
5036 if (TREE_CODE (t) == MODIFY_EXPR)
5037 return TREE_OPERAND (t, 1);
5040 return NULL_TREE;
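/* A usage sketch (illustrative only): given a function `sq' declared
   __attribute__ ((const)) whose saved GENERIC body is `return x * x;',
   passing the CALL_EXPR `sq (i)' here returns the expression `i * i'
   (the RHS of the MODIFY_EXPR produced from the inlined RETURN_EXPR),
   or NULL_TREE if the body is unsuitable.  */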
5043 /* Duplicate a type, fields and all. */
5045 tree
5046 build_duplicate_type (tree type)
5048 struct copy_body_data id;
5050 memset (&id, 0, sizeof (id));
5051 id.src_fn = current_function_decl;
5052 id.dst_fn = current_function_decl;
5053 id.src_cfun = cfun;
5054 id.decl_map = pointer_map_create ();
5055 id.debug_map = NULL;
5056 id.copy_decl = copy_decl_no_change;
5058 type = remap_type_1 (type, &id);
5060 pointer_map_destroy (id.decl_map);
5061 if (id.debug_map)
5062 pointer_map_destroy (id.debug_map);
5064 TYPE_CANONICAL (type) = type;
5066 return type;
5069 /* Return whether it is safe to inline the call edge E, i.e. whether the
5070 callee uses compatible target-specific options and the call site's actual
5071 argument types match the callee's parameter types. */
5072 bool
5073 tree_can_inline_p (struct cgraph_edge *e)
5075 #if 0
5076 /* This causes a regression in SPEC in that it prevents a cold function from
5077 inlining a hot function. Perhaps this should only apply to functions
5078 that the user declares hot/cold/optimize explicitly. */
5080 /* Don't inline a function with a higher optimization level than the
5081 caller, or with different space constraints (hot/cold functions). */
5082 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
5083 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
5085 if (caller_tree != callee_tree)
5087 struct cl_optimization *caller_opt
5088 = TREE_OPTIMIZATION ((caller_tree)
5089 ? caller_tree
5090 : optimization_default_node);
5092 struct cl_optimization *callee_opt
5093 = TREE_OPTIMIZATION ((callee_tree)
5094 ? callee_tree
5095 : optimization_default_node);
5097 if ((caller_opt->optimize > callee_opt->optimize)
5098 || (caller_opt->optimize_size != callee_opt->optimize_size))
5099 return false;
5101 #endif
5102 tree caller, callee;
5104 caller = e->caller->decl;
5105 callee = e->callee->decl;
5107 /* We cannot inline a function that uses a different EH personality
5108 than the caller. */
5109 if (DECL_FUNCTION_PERSONALITY (caller)
5110 && DECL_FUNCTION_PERSONALITY (callee)
5111 && (DECL_FUNCTION_PERSONALITY (caller)
5112 != DECL_FUNCTION_PERSONALITY (callee)))
5114 e->inline_failed = CIF_UNSPECIFIED;
5115 gimple_call_set_cannot_inline (e->call_stmt, true);
5116 return false;
5119 /* Allow the backend to decide if inlining is ok. */
5120 if (!targetm.target_option.can_inline_p (caller, callee))
5122 e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5123 gimple_call_set_cannot_inline (e->call_stmt, true);
5124 e->call_stmt_cannot_inline_p = true;
5125 return false;
5128 if (e->call_stmt
5129 && !gimple_check_call_args (e->call_stmt))
5131 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5132 gimple_call_set_cannot_inline (e->call_stmt, true);
5133 e->call_stmt_cannot_inline_p = true;
5134 return false;
5137 return true;
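/* Illustrative example (hypothetical): a C++ caller using
   __gxx_personality_v0 must not inline a callee built with a different
   EH personality, say __gcc_personality_v0 from a C translation unit
   using __attribute__ ((cleanup)); the check above refuses such edges
   rather than creating a function that would need two personalities.  */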