gcc/tree-inline.c
/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "ggc.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "integrate.h"

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inserted into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
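
/* As a rough illustration (invented names, not from any real dump):
   inlining

     int inc (int x) { return x + 1; }
     ... y = inc (a); ...

   duplicates the body of inc, remaps the PARM_DECL x to a fresh local
   VAR_DECL initialized from a, and turns the RETURN_EXPR into an
   assignment to a returned-value variable, so the caller roughly sees

     { int x.1 = a; retval.2 = x.1 + 1; }  y = retval.2;  */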

/* To do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */

/* Weights that estimate_num_insns uses for heuristics in inlining.  */

eni_weights eni_inlining_weights;

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Despite the name suggesting
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}
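
/* For example (hypothetical names, for illustration only): mapping a
   callee PARM_DECL p to a fresh caller VAR_DECL p.3 records both
   p -> p.3 and the identity p.3 -> p.3, so that a later lookup of the
   already-copied node does not produce a second copy.  */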

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct a new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return name;
    }

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
     that in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from the need to introduce a PHI node
     in the case the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining a function having an uninitialized variable, we
             might extend its lifetime (the variable might get reused).
             This causes an ICE in the case we end up extending the
             lifetime of an SSA name across an abnormal edge, and it also
             increases register pressure.

             We simply initialize all uninitialized vars by 0, except
             for the case we are inlining to the very first BB.  We can
             avoid this for all BBs that are not inside strongly connected
             regions of the CFG, but this is expensive to test.  */
          if (id->entry_bb
              && is_gimple_reg (SSA_NAME_VAR (name))
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;

              init_stmt = gimple_build_assign (new_tree,
                                               fold_convert (TREE_TYPE (new_tree),
                                                             integer_zero_node));
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
                  == name)
                set_default_def (SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
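
/* Rough illustration (names invented): if the callee reads an
   uninitialized local "u", the copied SSA name for u is given an
   explicit zero initializer, e.g. "u.5 = 0;", at the end of the entry
   block of the inlined body (unless we inline into the very first BB),
   so its lifetime cannot leak across abnormal edges.  */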

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      if (cfun && gimple_in_ssa_p (cfun)
          && (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
        {
          get_var_ann (t);
          add_referenced_var (t);
        }

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
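
/* For instance (illustrative only): a variably modified type such as
   the type of "char buf[n]" with a callee-local n must be remapped so
   that its size expression refers to the caller's copy of n; a plain
   type like "int" simply maps to itself.  */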

/* Return the previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}

static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;
      tree origin_var = DECL_ORIGIN (old_var);

      if (can_be_nonlocal (old_var, id))
        {
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var)
              && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
            cfun->local_decls = tree_cons (NULL_TREE, old_var,
                                           cfun->local_decls);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}

/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}

/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */
      if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree type, new_tree, old;

              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the
                 INDIRECT_REF, but we absolutely rely on that.  As
                 fold_indirect_ref does other useful transformations,
                 try that first, though.  */
              type = TREE_TYPE (TREE_TYPE (*n));
              new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (!*tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars.  If not referenced from types only.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
        gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the INDIRECT_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
          tree block = id->block;
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
          id->block = block;

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
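
/* Sketch of the *& case above (names invented for illustration): if
   the call site passes &v for a pointer parameter p, the decl map
   contains p -> &v, so the callee's *p first rewrites to *&v and is
   then folded back to plain v, avoiding a needless dereference.  */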

/* Called from copy_body via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              if (id->do_not_unshare)
                new_tree = *n;
              else
                new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars.  If not referenced from types or debug stmts only.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n);
              new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (NULL, new_nr);
}

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval && TREE_CODE (retval) != RESULT_DECL)
        {
          copy = gimple_build_assign (id->retvar, retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      if (gimple_debug_bind_p (stmt))
        {
          copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
                                          gimple_debug_bind_get_value (stmt),
                                          stmt);
          VEC_safe_push (gimple, heap, id->debug_stmts, copy);
          return copy;
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
         RESX and EH_DISPATCH.  */
      if (id->eh_map)
        switch (gimple_code (copy))
          {
          case GIMPLE_CALL:
            {
              tree r, fndecl = gimple_call_fndecl (copy);
              if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
                switch (DECL_FUNCTION_CODE (fndecl))
                  {
                  case BUILT_IN_EH_COPY_VALUES:
                    r = gimple_call_arg (copy, 1);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 1, r);
                    /* FALLTHRU */

                  case BUILT_IN_EH_POINTER:
                  case BUILT_IN_EH_FILTER:
                    r = gimple_call_arg (copy, 0);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 0, r);
                    break;

                  default:
                    break;
                  }
            }
            break;

          case GIMPLE_RESX:
            {
              int r = gimple_resx_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_resx_set_region (copy, r);
            }
            break;

          case GIMPLE_EH_DISPATCH:
            {
              int r = gimple_eh_dispatch_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_eh_dispatch_set_region (copy, r);
            }
            break;

          default:
            break;
          }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  if (gimple_debug_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}
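
/* Illustration (invented names): with transform_return_to_modify set,
   a callee statement "return D.42;" becomes "retvar = D.42;" in the
   copy; the control-flow part of the return is reconstructed later by
   CFG manipulation rather than by a statement.  */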

/* Copy a basic block, scaling the profile accordingly.  Edges will be
   taken care of later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     have just small importance to drive canonicalize_loop_headers.  */
  freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
        continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
         non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == NOP_EXPR
          && !is_gimple_val (gimple_assign_rhs1 (stmt)))
        {
          tree new_rhs;
          new_rhs = force_gimple_operand_gsi (&seq_gsi,
                                              gimple_assign_rhs1 (stmt),
                                              true, NULL, false, GSI_NEW_STMT);
          gimple_assign_set_rhs1 (stmt, new_rhs);
          id->regimplify = false;
        }

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
        gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block has been empty at the start of this iteration,
         call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
        copy_gsi = gsi_start_bb (copy_basic_block);
      else
        gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
         possibly turned the statement into multiple statements, so we
         need to process all of them.  */
      do
        {
          tree fn;

          stmt = gsi_stmt (copy_gsi);
          if (is_gimple_call (stmt)
              && gimple_call_va_arg_pack_p (stmt)
              && id->gimple_call)
            {
              /* __builtin_va_arg_pack () should be replaced by
                 all arguments corresponding to ... in the caller.  */
              tree p;
              gimple new_call;
              VEC(tree, heap) *argarray;
              size_t nargs = gimple_call_num_args (id->gimple_call);
              size_t n;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                nargs--;

              /* Create the new array of arguments.  */
              n = nargs + gimple_call_num_args (stmt);
              argarray = VEC_alloc (tree, heap, n);
              VEC_safe_grow (tree, heap, argarray, n);

              /* Copy all the arguments before '...'  */
              memcpy (VEC_address (tree, argarray),
                      gimple_call_arg_ptr (stmt, 0),
                      gimple_call_num_args (stmt) * sizeof (tree));

              /* Append the arguments passed in '...'  */
              memcpy (VEC_address (tree, argarray) + gimple_call_num_args (stmt),
                      gimple_call_arg_ptr (id->gimple_call, 0)
                      + (gimple_call_num_args (id->gimple_call) - nargs),
                      nargs * sizeof (tree));

              new_call = gimple_build_call_vec (gimple_call_fn (stmt),
                                                argarray);

              VEC_free (tree, heap, argarray);

              /* Copy all GIMPLE_CALL flags, location and block, except
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, stmt);
              gimple_call_set_va_arg_pack (new_call, false);
              gimple_set_location (new_call, gimple_location (stmt));
              gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

              gsi_replace (&copy_gsi, new_call, false);
              gimple_set_bb (stmt, NULL);
              stmt = new_call;
            }
          else if (is_gimple_call (stmt)
                   && id->gimple_call
                   && (decl = gimple_call_fndecl (stmt))
                   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
            {
              /* __builtin_va_arg_pack_len () should be replaced by
                 the number of anonymous arguments.  */
              size_t nargs = gimple_call_num_args (id->gimple_call);
              tree count, p;
              gimple new_stmt;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                nargs--;

              count = build_int_cst (integer_type_node, nargs);
              new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
              gsi_replace (&copy_gsi, new_stmt, false);
              stmt = new_stmt;
            }

          /* Statements produced by inlining can be unfolded, especially
             when we constant propagated some operands.  We can't fold
             them right now for two reasons:
             1) folding requires SSA_NAME_DEF_STMTs to be correct
             2) we can't change function calls to builtins.
             So we just mark statements for later folding.  We mark
             all new statements, instead of just statements that have
             changed by some nontrivial substitution, so that even
             statements made foldable indirectly are updated.  If this
             turns out to be expensive, copy_body can be told to watch
             for nontrivial changes.  */
          if (id->statements_to_fold)
            pointer_set_insert (id->statements_to_fold, stmt);

          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (is_gimple_call (stmt))
            {
              struct cgraph_edge *edge;
              int flags;

              switch (id->transform_call_graph_edges)
                {
                case CB_CGE_DUPLICATE:
                  edge = cgraph_edge (id->src_node, orig_stmt);
                  if (edge)
                    {
                      int edge_freq = edge->frequency;
                      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
                                                gimple_uid (stmt),
                                                REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
                                                edge->frequency, true);
                      /* We could also just rescale the frequency, but
                         doing so would introduce roundoff errors and make
                         the verifier unhappy.  */
                      edge->frequency
                        = compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                          copy_basic_block);
                      if (dump_file
                          && profile_status_for_function (cfun) != PROFILE_ABSENT
                          && (edge_freq > edge->frequency + 10
                              || edge_freq < edge->frequency - 10))
                        {
                          fprintf (dump_file, "Edge frequency estimated by "
                                   "cgraph %i diverges from inliner's estimate %i\n",
                                   edge_freq,
                                   edge->frequency);
                          fprintf (dump_file,
                                   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
                                   bb->index,
                                   bb->frequency,
                                   copy_basic_block->frequency);
                        }
                    }
                  break;

                case CB_CGE_MOVE_CLONES:
                  cgraph_set_call_stmt_including_clones (id->dst_node,
                                                         orig_stmt, stmt);
                  edge = cgraph_edge (id->dst_node, stmt);
                  break;

                case CB_CGE_MOVE:
                  edge = cgraph_edge (id->dst_node, orig_stmt);
                  if (edge)
                    cgraph_set_call_stmt (edge, stmt);
                  break;

                default:
                  gcc_unreachable ();
                }

              /* Constant propagation on arguments done during inlining
                 may create new direct calls.  Produce an edge for them.  */
              if ((!edge
                   || (edge->indirect_call
                       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
                  && is_gimple_call (stmt)
                  && (fn = gimple_call_fndecl (stmt)) != NULL)
                {
                  struct cgraph_node *dest = cgraph_node (fn);

                  /* We have a missing edge in the callgraph.  This can happen
                     when previous inlining turned an indirect call into a
                     direct call by constant propagating arguments or we are
                     producing a dead clone (for further cloning).  In all
                     other cases we hit a bug (incorrect node sharing is the
                     most common reason for missing edges).  */
                  gcc_assert (dest->needed || !dest->analyzed
                              || !id->src_node->analyzed);
                  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
                    cgraph_create_edge_including_clones
                      (id->dst_node, dest, orig_stmt, stmt, bb->count,
                       compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                       copy_basic_block),
                       bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
                  else
                    cgraph_create_edge (id->dst_node, dest, stmt,
                                        bb->count,
                                        compute_call_stmt_bb_frequency
                                          (id->dst_node->decl, copy_basic_block),
                                        bb->loop_depth)->inline_failed
                      = CIF_ORIGINALLY_INDIRECT_CALL;
                  if (dump_file)
                    {
                      fprintf (dump_file, "Created new direct edge to %s",
                               cgraph_node_name (dest));
                    }
                }

              flags = gimple_call_flags (stmt);
              if (flags & ECF_MAY_BE_ALLOCA)
                cfun->calls_alloca = true;
              if (flags & ECF_RETURNS_TWICE)
                cfun->calls_setjmp = true;
            }

          maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
                                      id->eh_map, id->eh_lp_nr);

          if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
            {
              ssa_op_iter i;
              tree def;

              find_new_referenced_vars (gsi_stmt (copy_gsi));
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                if (TREE_CODE (def) == SSA_NAME)
                  SSA_NAME_DEF_STMT (def) = stmt;
            }

          gsi_next (&copy_gsi);
        }
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}
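
/* Worked example (hypothetical): if "void err (int code, ...)" forwards
   its trailing arguments with __builtin_va_arg_pack () to fprintf, then
   inlining err into the call "err (1, "%s", msg)" replaces the pack so
   the copy calls fprintf with the caller's "%s" and msg appended after
   the fixed arguments, and __builtin_va_arg_pack_len () in the same
   body would be replaced by the constant 2.  */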

/* Inserting a Single Entry Multiple Exit region in SSA form into code in
   SSA form is quite easy, since the dominator relationship for the old
   basic blocks does not change.

   There is, however, an exception: inlining might change the dominator
   relation across EH edges leading from basic blocks within the inlined
   function to landing pads in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges;
   we might then want to change the way we build the CFG pre-inlining to
   include all the possible edges.  */
static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
                                  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
        || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
        gimple phi;
        gimple_stmt_iterator si;

        if (!nonlocal_goto)
          gcc_assert (e->flags & EDGE_EH);

        if (!can_throw)
          gcc_assert (!(e->flags & EDGE_EH));

        for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
          {
            edge re;

            phi = gsi_stmt (si);

            /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
            gcc_assert (!e->dest->aux);

            gcc_assert ((e->flags & EDGE_EH)
                        || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

            if (!is_gimple_reg (PHI_RESULT (phi)))
              {
                mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
                continue;
              }

            re = find_edge (ret_bb, e->dest);
            gcc_assert (re);
            gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
                        == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

            SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
                     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
          }
      }
}

/* Copy edges from BB into its copy constructed earlier, scaling the
   profile accordingly.  Edges will be taken care of later.  Assume aux
   pointers point to the copies of each BB.  */

static void
copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  gimple_stmt_iterator si;
  int flags;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
        edge new_edge;

        flags = old_edge->flags;

        /* Return edges do get a FALLTHRU flag when they get inlined.  */
        if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
            && old_edge->dest->aux != EXIT_BLOCK_PTR)
          flags |= EDGE_FALLTHRU;
        new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
        new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
        new_edge->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;

  for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
    {
      gimple copy_stmt;
      bool can_throw, nonlocal_goto;

      copy_stmt = gsi_stmt (si);
      if (!is_gimple_debug (copy_stmt))
        {
          update_stmt (copy_stmt);
          if (gimple_in_ssa_p (cfun))
            mark_symbols_for_renaming (copy_stmt);
        }

      /* Do this before the possible split_block.  */
      gsi_next (&si);

      /* If this tree could throw an exception, there are two
         cases where we need to add abnormal edge(s): the
         tree wasn't in a region and there is a "current
         region" in the caller; or the original tree had
         EH edges.  In both cases split the block after the tree,
         and add abnormal edge(s) as needed; we need both
         those from the callee and the caller.
         We check whether the copy can throw, because the const
         propagation can change an INDIRECT_REF which throws
         into a COMPONENT_REF which doesn't.  If the copy
         can throw, the original could also throw.  */
      can_throw = stmt_can_throw_internal (copy_stmt);
      nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);

      if (can_throw || nonlocal_goto)
        {
          if (!gsi_end_p (si))
            /* Note that bb's predecessor edges aren't necessarily
               right at this point; split_block doesn't care.  */
            {
              edge e = split_block (new_bb, copy_stmt);

              new_bb = e->dest;
              new_bb->aux = e->src->aux;
              si = gsi_start_bb (new_bb);
            }
        }

      if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
        make_eh_dispatch_edges (copy_stmt);
      else if (can_throw)
        make_eh_edges (copy_stmt);

      if (nonlocal_goto)
        make_abnormal_goto_edges (gimple_bb (copy_stmt), true);

      if ((can_throw || nonlocal_goto)
          && gimple_in_ssa_p (cfun))
        update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
                                          can_throw, nonlocal_goto);
    }
}
1903 /* Copy the PHIs. All blocks and edges have been copied; some blocks
1904 were possibly split and new outgoing EH edges inserted.
1905 BB points to the block of the original function and AUX pointers link
1906 the original and newly copied blocks. */
1908 static void
1909 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1911 basic_block const new_bb = (basic_block) bb->aux;
1912 edge_iterator ei;
1913 gimple phi;
1914 gimple_stmt_iterator si;
1916 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1918 tree res, new_res;
1919 gimple new_phi;
1920 edge new_edge;
1922 phi = gsi_stmt (si);
1923 res = PHI_RESULT (phi);
1924 new_res = res;
1925 if (is_gimple_reg (res))
1927 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1928 SSA_NAME_DEF_STMT (new_res)
1929 = new_phi = create_phi_node (new_res, new_bb);
1930 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1932 edge const old_edge
1933 = find_edge ((basic_block) new_edge->src->aux, bb);
1934 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1935 tree new_arg = arg;
1936 tree block = id->block;
1937 id->block = NULL_TREE;
1938 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1939 id->block = block;
1940 gcc_assert (new_arg);
1941 /* With return slot optimization we can end up with
1942 non-gimple (foo *)&this->m, fix that here. */
1943 if (TREE_CODE (new_arg) != SSA_NAME
1944 && TREE_CODE (new_arg) != FUNCTION_DECL
1945 && !is_gimple_val (new_arg))
1947 gimple_seq stmts = NULL;
1948 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1949 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
1951 add_phi_arg (new_phi, new_arg, new_edge,
1952 gimple_phi_arg_location_from_edge (phi, old_edge));
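/* A small illustration of the copying above (hypothetical names): given an
   original PHI

     res_2 = PHI <a_1(3), b_4(5)>

   the copy receives a fresh SSA name for the result, and each argument is
   remapped and attached to the corresponding copied edge, e.g.

     res_9 = PHI <a_7(3'), b_8(5')>

   where 3' and 5' are the copies of blocks 3 and 5, found via find_edge on
   new_edge->src->aux.  */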
1959 /* Wrapper for remap_decl so it can be used as a callback. */
1961 static tree
1962 remap_decl_1 (tree decl, void *data)
1964 return remap_decl (decl, (copy_body_data *) data);
1967 /* Build the struct function and associated data structures for the new
1968 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1970 static void
1971 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
1973 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1974 gcov_type count_scale;
1976 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1977 count_scale = (REG_BR_PROB_BASE * count
1978 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1979 else
1980 count_scale = REG_BR_PROB_BASE;
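/* A worked example of the scaling above (REG_BR_PROB_BASE is 10000): if the
   callee's entry block has a profile count of 1000 and this clone is being
   created for a count of 250, count_scale is 10000 * 250 / 1000 == 2500, so
   a block with count 400 later scales to 400 * 2500 / 10000 == 100.  */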
1982 /* Register specific tree functions. */
1983 gimple_register_cfg_hooks ();
1985 /* Get clean struct function. */
1986 push_struct_function (new_fndecl);
1988 /* We will rebuild these, so just sanity check that they are empty. */
1989 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
1990 gcc_assert (cfun->local_decls == NULL);
1991 gcc_assert (cfun->cfg == NULL);
1992 gcc_assert (cfun->decl == new_fndecl);
1994 /* Copy items we preserve during cloning. */
1995 cfun->static_chain_decl = src_cfun->static_chain_decl;
1996 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
1997 cfun->function_end_locus = src_cfun->function_end_locus;
1998 cfun->curr_properties = src_cfun->curr_properties;
1999 cfun->last_verified = src_cfun->last_verified;
2000 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2001 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2002 cfun->function_frequency = src_cfun->function_frequency;
2003 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2004 cfun->stdarg = src_cfun->stdarg;
2005 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
2006 cfun->after_inlining = src_cfun->after_inlining;
2007 cfun->returns_struct = src_cfun->returns_struct;
2008 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2009 cfun->after_tree_profile = src_cfun->after_tree_profile;
2011 init_empty_tree_cfg ();
2013 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2014 ENTRY_BLOCK_PTR->count =
2015 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2016 REG_BR_PROB_BASE);
2017 ENTRY_BLOCK_PTR->frequency
2018 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2019 EXIT_BLOCK_PTR->count =
2020 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2021 REG_BR_PROB_BASE);
2022 EXIT_BLOCK_PTR->frequency =
2023 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2024 if (src_cfun->eh)
2025 init_eh_for_function ();
2027 if (src_cfun->gimple_df)
2029 init_tree_ssa (cfun);
2030 cfun->gimple_df->in_ssa_p = true;
2031 init_ssa_operands ();
2033 pop_cfun ();
2036 /* Make a copy of the body of FN so that it can be inserted inline in
2037 another function. Walks FN via CFG, returns new fndecl. */
2039 static tree
2040 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2041 basic_block entry_block_map, basic_block exit_block_map)
2043 tree callee_fndecl = id->src_fn;
2044 /* Original cfun for the callee, doesn't change. */
2045 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2046 struct function *cfun_to_copy;
2047 basic_block bb;
2048 tree new_fndecl = NULL;
2049 gcov_type count_scale;
2050 int last;
2052 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2053 count_scale = (REG_BR_PROB_BASE * count
2054 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2055 else
2056 count_scale = REG_BR_PROB_BASE;
2058 /* Register specific tree functions. */
2059 gimple_register_cfg_hooks ();
2061 /* Must have a CFG here at this point. */
2062 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2063 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2065 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2067 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2068 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2069 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2070 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2072 /* Duplicate any exception-handling regions. */
2073 if (cfun->eh)
2074 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2075 remap_decl_1, id);
2077 /* Use aux pointers to map the original blocks to their copies. */
2078 FOR_EACH_BB_FN (bb, cfun_to_copy)
2080 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2081 bb->aux = new_bb;
2082 new_bb->aux = bb;
2085 last = last_basic_block;
2087 /* Now that we've duplicated the blocks, duplicate their edges. */
2088 FOR_ALL_BB_FN (bb, cfun_to_copy)
2089 copy_edges_for_bb (bb, count_scale, exit_block_map);
2091 if (gimple_in_ssa_p (cfun))
2092 FOR_ALL_BB_FN (bb, cfun_to_copy)
2093 copy_phis_for_bb (bb, id);
2095 FOR_ALL_BB_FN (bb, cfun_to_copy)
2097 ((basic_block)bb->aux)->aux = NULL;
2098 bb->aux = NULL;
2101 /* Zero out AUX fields of blocks newly created during EH edge
2102 insertion. */
2103 for (; last < last_basic_block; last++)
2104 BASIC_BLOCK (last)->aux = NULL;
2105 entry_block_map->aux = NULL;
2106 exit_block_map->aux = NULL;
2108 if (id->eh_map)
2110 pointer_map_destroy (id->eh_map);
2111 id->eh_map = NULL;
2114 return new_fndecl;
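/* A sketch of the aux-pointer protocol used above (hypothetical blocks):
   for original blocks B1 -> B2, copy_bb creates copies B1' and B2' with
   B1->aux == B1' and B1'->aux == B1; copy_edges_for_bb then recreates
   B1' -> B2' by looking up old_edge->dest->aux.  Both directions of the
   mapping are cleared again before returning.  */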
2117 /* Copy the debug STMT using ID. We deal with these statements in a
2118 special way: if any variable in their VALUE expression wasn't
2119 remapped yet, we won't remap it, because that would get decl uids
2120 out of sync, causing codegen differences between -g and -g0. If
2121 this arises, we drop the VALUE expression altogether. */
2123 static void
2124 copy_debug_stmt (gimple stmt, copy_body_data *id)
2126 tree t, *n;
2127 struct walk_stmt_info wi;
2129 t = id->block;
2130 if (gimple_block (stmt))
2132 tree *n;
2133 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2134 if (n)
2135 t = *n;
2137 gimple_set_block (stmt, t);
2139 /* Remap all the operands of the statement. */
2140 memset (&wi, 0, sizeof (wi));
2141 wi.info = id;
2143 processing_debug_stmt = 1;
2145 t = gimple_debug_bind_get_var (stmt);
2147 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2148 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2150 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2151 t = *n;
2153 else if (TREE_CODE (t) == VAR_DECL
2154 && !TREE_STATIC (t)
2155 && gimple_in_ssa_p (cfun)
2156 && !pointer_map_contains (id->decl_map, t)
2157 && !var_ann (t))
2158 /* T is a non-localized variable. */;
2159 else
2160 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2162 gimple_debug_bind_set_var (stmt, t);
2164 if (gimple_debug_bind_has_value_p (stmt))
2165 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2166 remap_gimple_op_r, &wi, NULL);
2168 /* Punt if any decl couldn't be remapped. */
2169 if (processing_debug_stmt < 0)
2170 gimple_debug_bind_reset_value (stmt);
2172 processing_debug_stmt = 0;
2174 update_stmt (stmt);
2175 if (gimple_in_ssa_p (cfun))
2176 mark_symbols_for_renaming (stmt);
2179 /* Process deferred debug stmts. In order to give values better odds
2180 of being successfully remapped, we delay the processing of debug
2181 stmts until all other stmts that might require remapping are
2182 processed. */
2184 static void
2185 copy_debug_stmts (copy_body_data *id)
2187 size_t i;
2188 gimple stmt;
2190 if (!id->debug_stmts)
2191 return;
2193 for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
2194 copy_debug_stmt (stmt, id);
2196 VEC_free (gimple, heap, id->debug_stmts);
2199 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2200 another function. */
2202 static tree
2203 copy_tree_body (copy_body_data *id)
2205 tree fndecl = id->src_fn;
2206 tree body = DECL_SAVED_TREE (fndecl);
2208 walk_tree (&body, copy_tree_body_r, id, NULL);
2210 return body;
2213 /* Make a copy of the body of FN so that it can be inserted inline in
2214 another function. */
2216 static tree
2217 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2218 basic_block entry_block_map, basic_block exit_block_map)
2220 tree fndecl = id->src_fn;
2221 tree body;
2223 /* If this body has a CFG, walk CFG and copy. */
2224 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2225 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map);
2226 copy_debug_stmts (id);
2228 return body;
2231 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2232 defined in function FN, or of a data member thereof. */
2234 static bool
2235 self_inlining_addr_expr (tree value, tree fn)
2237 tree var;
2239 if (TREE_CODE (value) != ADDR_EXPR)
2240 return false;
2242 var = get_base_address (TREE_OPERAND (value, 0));
2244 return var && auto_var_in_fn_p (var, fn);
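/* For example (illustrative): when fn is inlined into itself, directly or
   mutually recursively, an argument such as &local, where "local" is an
   auto variable of fn, must not be propagated into the copy; the inlined
   body gets its own instance of "local", so the address from the caller's
   frame would be wrong.  */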
2247 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2248 the lexical block and line number information from BASE_STMT, if given,
2249 or from the last stmt of the block otherwise. */
2251 static gimple
2252 insert_init_debug_bind (copy_body_data *id,
2253 basic_block bb, tree var, tree value,
2254 gimple base_stmt)
2256 gimple note;
2257 gimple_stmt_iterator gsi;
2258 tree tracked_var;
2260 if (!gimple_in_ssa_p (id->src_cfun))
2261 return NULL;
2263 if (!MAY_HAVE_DEBUG_STMTS)
2264 return NULL;
2266 tracked_var = target_for_debug_bind (var);
2267 if (!tracked_var)
2268 return NULL;
2270 if (bb)
2272 gsi = gsi_last_bb (bb);
2273 if (!base_stmt && !gsi_end_p (gsi))
2274 base_stmt = gsi_stmt (gsi);
2277 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2279 if (bb)
2281 if (!gsi_end_p (gsi))
2282 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2283 else
2284 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2287 return note;
2290 static void
2291 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2293 /* If VAR represents a zero-sized variable, it's possible that the
2294 assignment statement may result in no gimple statements. */
2295 if (init_stmt)
2297 gimple_stmt_iterator si = gsi_last_bb (bb);
2299 /* We can end up with init statements that store to a non-register
2300 from a rhs with a conversion. Handle that here by forcing the
2301 rhs into a temporary. gimple_regimplify_operands is not
2302 prepared to do this for us. */
2303 if (!is_gimple_debug (init_stmt)
2304 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2305 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2306 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2308 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2309 gimple_expr_type (init_stmt),
2310 gimple_assign_rhs1 (init_stmt));
2311 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2312 GSI_NEW_STMT);
2313 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2314 gimple_assign_set_rhs1 (init_stmt, rhs);
2316 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2317 gimple_regimplify_operands (init_stmt, &si);
2318 mark_symbols_for_renaming (init_stmt);
2320 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2322 tree var, def = gimple_assign_lhs (init_stmt);
2324 if (TREE_CODE (def) == SSA_NAME)
2325 var = SSA_NAME_VAR (def);
2326 else
2327 var = def;
2329 insert_init_debug_bind (id, bb, var, def, init_stmt);
2334 /* Initialize parameter P with VALUE. If needed, produce an init statement
2335 at the end of BB. When BB is NULL, we return the init statement to be
2336 output later. */
2337 static gimple
2338 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2339 basic_block bb, tree *vars)
2341 gimple init_stmt = NULL;
2342 tree var;
2343 tree rhs = value;
2344 tree def = (gimple_in_ssa_p (cfun)
2345 ? gimple_default_def (id->src_cfun, p) : NULL);
2347 if (value
2348 && value != error_mark_node
2349 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2351 if (fold_convertible_p (TREE_TYPE (p), value))
2352 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2353 else
2354 /* ??? For valid (GIMPLE) programs we should not end up here.
2355 Still if something has gone wrong and we end up with truly
2356 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2357 to not leak invalid GIMPLE to the following passes. */
2358 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2361 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2362 here since the type of this decl must be visible to the calling
2363 function. */
2364 var = copy_decl_to_var (p, id);
2366 /* We're actually using the newly-created var. */
2367 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2369 get_var_ann (var);
2370 add_referenced_var (var);
2373 /* Declare this new variable. */
2374 TREE_CHAIN (var) = *vars;
2375 *vars = var;
2377 /* Make gimplifier happy about this variable. */
2378 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2380 /* If the parameter is never assigned to, has no SSA_NAMEs created,
2381 we would not need to create a new variable here at all, if it
2382 weren't for debug info. Still, we can just use the argument
2383 value. */
2384 if (TREE_READONLY (p)
2385 && !TREE_ADDRESSABLE (p)
2386 && value && !TREE_SIDE_EFFECTS (value)
2387 && !def)
2389 /* We may produce non-gimple trees by adding NOPs or introduce
2390 invalid sharing when the operand is not really constant.
2391 It is no big deal to prohibit constant propagation here, as
2392 we will constant propagate in the DOM1 pass anyway. */
2393 if (is_gimple_min_invariant (value)
2394 && useless_type_conversion_p (TREE_TYPE (p),
2395 TREE_TYPE (value))
2396 /* We have to be very careful about ADDR_EXPR. Make sure
2397 the base variable isn't a local variable of the inlined
2398 function, e.g., when doing recursive inlining, direct or
2399 mutually-recursive or whatever, which is why we don't
2400 just test whether fn == current_function_decl. */
2401 && ! self_inlining_addr_expr (value, fn))
2403 insert_decl_map (id, p, value);
2404 insert_debug_decl_map (id, p, var);
2405 return insert_init_debug_bind (id, bb, var, value, NULL);
2409 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2410 that way, when the PARM_DECL is encountered, it will be
2411 automatically replaced by the VAR_DECL. */
2412 insert_decl_map (id, p, var);
2414 /* Even if P was TREE_READONLY, the new VAR should not be.
2415 In the original code, we would have constructed a
2416 temporary, and then the function body would have never
2417 changed the value of P. However, now, we will be
2418 constructing VAR directly. The constructor body may
2419 change its value multiple times as it is being
2420 constructed. Therefore, it must not be TREE_READONLY;
2421 the back-end assumes that a TREE_READONLY variable is
2422 assigned to only once. */
2423 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2424 TREE_READONLY (var) = 0;
2426 /* If there is no setup required and we are in SSA, take the easy route
2427 replacing all SSA names representing the function parameter by the
2428 SSA name passed to the function.
2430 We need to construct a map for the variable anyway, as it might be used
2431 in different SSA names when the parameter is set in the function.
2433 Do the replacement at -O0 for const arguments replaced by a constant.
2434 This is important for builtin_constant_p and other constructs requiring
2435 a constant argument to be visible in the inlined function body. */
2436 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2437 && (optimize
2438 || (TREE_READONLY (p)
2439 && is_gimple_min_invariant (rhs)))
2440 && (TREE_CODE (rhs) == SSA_NAME
2441 || is_gimple_min_invariant (rhs))
2442 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2444 insert_decl_map (id, def, rhs);
2445 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2448 /* If the value of argument is never used, don't care about initializing
2449 it. */
2450 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2452 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2453 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2456 /* Initialize this VAR_DECL from the equivalent argument. Convert
2457 the argument to the proper type in case it was promoted. */
2458 if (value)
2460 if (rhs == error_mark_node)
2462 insert_decl_map (id, p, var);
2463 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2466 STRIP_USELESS_TYPE_CONVERSION (rhs);
2468 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2469 keep our trees in gimple form. */
2470 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2472 def = remap_ssa_name (def, id);
2473 init_stmt = gimple_build_assign (def, rhs);
2474 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2475 set_default_def (var, NULL);
2477 else
2478 init_stmt = gimple_build_assign (var, rhs);
2480 if (bb && init_stmt)
2481 insert_init_stmt (id, bb, init_stmt);
2483 return init_stmt;
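/* An illustrative sketch (hypothetical GIMPLE): when inlining
   int f (int p) at a call f (x_1), the parameter either has its SSA
   default definition remapped directly to x_1 (the easy route above), or,
   when that is not possible, gets an equivalent VAR_DECL p.3 and an init
   statement

     p.3 = x_1;

   emitted at the end of BB.  */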
2486 /* Generate code to initialize the parameters of the function at the
2487 top of the stack in ID from the GIMPLE_CALL STMT. */
2489 static void
2490 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2491 tree fn, basic_block bb)
2493 tree parms;
2494 size_t i;
2495 tree p;
2496 tree vars = NULL_TREE;
2497 tree static_chain = gimple_call_chain (stmt);
2499 /* Figure out what the parameters are. */
2500 parms = DECL_ARGUMENTS (fn);
2502 /* Loop through the parameter declarations, replacing each with an
2503 equivalent VAR_DECL, appropriately initialized. */
2504 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2506 tree val;
2507 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2508 setup_one_parameter (id, p, val, fn, bb, &vars);
2511 /* Initialize the static chain. */
2512 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2513 gcc_assert (fn != current_function_decl);
2514 if (p)
2516 /* No static chain? Seems like a bug in tree-nested.c. */
2517 gcc_assert (static_chain);
2519 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2522 declare_inline_vars (id->block, vars);
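/* For example (hypothetical call): inlining int bar (int a, int b) at
   "tmp_5 = bar (5, c_2)" walks DECL_ARGUMENTS, setting up "a" from the
   constant 5 and "b" from c_2, and then chains any replacement VAR_DECLs
   into id->block via declare_inline_vars.  */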
2526 /* Declare a return variable to replace the RESULT_DECL for the
2527 function we are calling, and register it as the RESULT_DECL's
2528 equivalent so that the copied body stores the result into it.
2531 RETURN_SLOT, if non-null, is the place where to store the result. It
2532 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2533 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2535 The return value is a (possibly null) value that holds the result
2536 as seen by the caller. */
2538 static tree
2539 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest)
2541 tree callee = id->src_fn;
2542 tree caller = id->dst_fn;
2543 tree result = DECL_RESULT (callee);
2544 tree callee_type = TREE_TYPE (result);
2545 tree caller_type;
2546 tree var, use;
2548 /* Handle type-mismatches in the function declaration return type
2549 vs. the call expression. */
2550 if (modify_dest)
2551 caller_type = TREE_TYPE (modify_dest);
2552 else
2553 caller_type = TREE_TYPE (TREE_TYPE (callee));
2555 /* We don't need to do anything for functions that don't return
2556 anything. */
2557 if (!result || VOID_TYPE_P (callee_type))
2558 return NULL_TREE;
2560 /* If there was a return slot, then the return value is the
2561 dereferenced address of that object. */
2562 if (return_slot)
2564 /* The front end shouldn't have used both return_slot and
2565 a modify expression. */
2566 gcc_assert (!modify_dest);
2567 if (DECL_BY_REFERENCE (result))
2569 tree return_slot_addr = build_fold_addr_expr (return_slot);
2570 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2572 /* We are going to construct *&return_slot and we can't do that
2573 for variables believed not to be addressable.
2575 FIXME: This check can possibly trigger, because values returned
2576 via the return slot optimization are not believed to have their
2577 address taken by alias analysis. */
2578 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2579 if (gimple_in_ssa_p (cfun))
2581 HOST_WIDE_INT bitsize;
2582 HOST_WIDE_INT bitpos;
2583 tree offset;
2584 enum machine_mode mode;
2585 int unsignedp;
2586 int volatilep;
2587 tree base;
2588 base = get_inner_reference (return_slot, &bitsize, &bitpos,
2589 &offset,
2590 &mode, &unsignedp, &volatilep,
2591 false);
2592 if (TREE_CODE (base) == INDIRECT_REF)
2593 base = TREE_OPERAND (base, 0);
2594 if (TREE_CODE (base) == SSA_NAME)
2595 base = SSA_NAME_VAR (base);
2596 mark_sym_for_renaming (base);
2598 var = return_slot_addr;
2600 else
2602 var = return_slot;
2603 gcc_assert (TREE_CODE (var) != SSA_NAME);
2604 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2606 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2607 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2608 && !DECL_GIMPLE_REG_P (result)
2609 && DECL_P (var))
2610 DECL_GIMPLE_REG_P (var) = 0;
2611 use = NULL;
2612 goto done;
2615 /* All types requiring non-trivial constructors should have been handled. */
2616 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2618 /* Attempt to avoid creating a new temporary variable. */
2619 if (modify_dest
2620 && TREE_CODE (modify_dest) != SSA_NAME)
2622 bool use_it = false;
2624 /* We can't use MODIFY_DEST if there's type promotion involved. */
2625 if (!useless_type_conversion_p (callee_type, caller_type))
2626 use_it = false;
2628 /* ??? If we're assigning to a variable sized type, then we must
2629 reuse the destination variable, because we've no good way to
2630 create variable sized temporaries at this point. */
2631 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2632 use_it = true;
2634 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2635 reuse it as the result of the call directly. Don't do this if
2636 it would promote MODIFY_DEST to addressable. */
2637 else if (TREE_ADDRESSABLE (result))
2638 use_it = false;
2639 else
2641 tree base_m = get_base_address (modify_dest);
2643 /* If the base isn't a decl, then it's a pointer, and we don't
2644 know where that's going to go. */
2645 if (!DECL_P (base_m))
2646 use_it = false;
2647 else if (is_global_var (base_m))
2648 use_it = false;
2649 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2650 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2651 && !DECL_GIMPLE_REG_P (result)
2652 && DECL_GIMPLE_REG_P (base_m))
2653 use_it = false;
2654 else if (!TREE_ADDRESSABLE (base_m))
2655 use_it = true;
2658 if (use_it)
2660 var = modify_dest;
2661 use = NULL;
2662 goto done;
2666 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2668 var = copy_result_decl_to_var (result, id);
2669 if (gimple_in_ssa_p (cfun))
2671 get_var_ann (var);
2672 add_referenced_var (var);
2675 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2676 DECL_STRUCT_FUNCTION (caller)->local_decls
2677 = tree_cons (NULL_TREE, var,
2678 DECL_STRUCT_FUNCTION (caller)->local_decls);
2680 /* Do not have the rest of GCC warn about this variable as it should
2681 not be visible to the user. */
2682 TREE_NO_WARNING (var) = 1;
2684 declare_inline_vars (id->block, var);
2686 /* Build the use expr. If the return type of the function was
2687 promoted, convert it back to the expected type. */
2688 use = var;
2689 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2690 use = fold_convert (caller_type, var);
2692 STRIP_USELESS_TYPE_CONVERSION (use);
2694 if (DECL_BY_REFERENCE (result))
2696 TREE_ADDRESSABLE (var) = 1;
2697 var = build_fold_addr_expr (var);
2700 done:
2701 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2702 way, when the RESULT_DECL is encountered, it will be
2703 automatically replaced by the VAR_DECL. */
2704 insert_decl_map (id, result, var);
2706 /* Remember this so we can ignore it in remap_decls. */
2707 id->retvar = var;
2709 return use;
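/* An illustrative sketch (hypothetical names): for "a_1 = foo ()" where
   the LHS cannot be reused, a temporary retval.5 is declared here and
   mapped to foo's RESULT_DECL; returns in the copied body then store to
   retval.5, and the caller of this routine later replaces the call with
   "a_1 = retval.5" using the use expression returned here.  */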
2712 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2713 to a local label. */
2715 static tree
2716 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2718 tree node = *nodep;
2719 tree fn = (tree) fnp;
2721 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2722 return node;
2724 if (TYPE_P (node))
2725 *walk_subtrees = 0;
2727 return NULL_TREE;
2730 /* Determine if the function can be copied. If so return NULL. If
2731 not, return a string describing the reason for failure. */
2733 static const char *
2734 copy_forbidden (struct function *fun, tree fndecl)
2736 const char *reason = fun->cannot_be_copied_reason;
2737 tree step;
2739 /* Only examine the function once. */
2740 if (fun->cannot_be_copied_set)
2741 return reason;
2743 /* We cannot copy a function that receives a non-local goto
2744 because we cannot remap the destination label used in the
2745 function that is performing the non-local goto. */
2746 /* ??? Actually, this should be possible, if we work at it.
2747 No doubt there's just a handful of places that simply
2748 assume it doesn't happen and don't substitute properly. */
2749 if (fun->has_nonlocal_label)
2751 reason = G_("function %q+F can never be copied "
2752 "because it receives a non-local goto");
2753 goto fail;
2756 for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2758 tree decl = TREE_VALUE (step);
2760 if (TREE_CODE (decl) == VAR_DECL
2761 && TREE_STATIC (decl)
2762 && !DECL_EXTERNAL (decl)
2763 && DECL_INITIAL (decl)
2764 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2765 has_label_address_in_static_1,
2766 fndecl))
2768 reason = G_("function %q+F can never be copied because it saves "
2769 "address of local label in a static variable");
2770 goto fail;
2774 fail:
2775 fun->cannot_be_copied_reason = reason;
2776 fun->cannot_be_copied_set = true;
2777 return reason;
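/* The kind of user code the static-label check above rejects, for
   illustration (hypothetical example):

     void fn (void)
     {
      lab:;
       static void *p = &&lab;
     }

   DECL_INITIAL of "p" refers to the local label, and a copied body would
   be left with an address pointing into the original function.  */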
2781 static const char *inline_forbidden_reason;
2783 /* A callback for walk_gimple_seq to handle statements. Returns non-null
2784 iff a function cannot be inlined. Also sets the reason why. */
2786 static tree
2787 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2788 struct walk_stmt_info *wip)
2790 tree fn = (tree) wip->info;
2791 tree t;
2792 gimple stmt = gsi_stmt (*gsi);
2794 switch (gimple_code (stmt))
2796 case GIMPLE_CALL:
2797 /* Refuse to inline an alloca call unless the user explicitly forced it,
2798 as this may drastically change the program's memory overhead when the
2799 function using alloca is called in a loop. In the GCC benchmark in
2800 SPEC2000, inlining into schedule_block caused it to require 2GB of
2801 RAM instead of 256MB. */
2802 if (gimple_alloca_call_p (stmt)
2803 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2805 inline_forbidden_reason
2806 = G_("function %q+F can never be inlined because it uses "
2807 "alloca (override using the always_inline attribute)");
2808 *handled_ops_p = true;
2809 return fn;
2812 t = gimple_call_fndecl (stmt);
2813 if (t == NULL_TREE)
2814 break;
2816 /* We cannot inline functions that call setjmp. */
2817 if (setjmp_call_p (t))
2819 inline_forbidden_reason
2820 = G_("function %q+F can never be inlined because it uses setjmp");
2821 *handled_ops_p = true;
2822 return t;
2825 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2826 switch (DECL_FUNCTION_CODE (t))
2828 /* We cannot inline functions that take a variable number of
2829 arguments. */
2830 case BUILT_IN_VA_START:
2831 case BUILT_IN_NEXT_ARG:
2832 case BUILT_IN_VA_END:
2833 inline_forbidden_reason
2834 = G_("function %q+F can never be inlined because it "
2835 "uses variable argument lists");
2836 *handled_ops_p = true;
2837 return t;
2839 case BUILT_IN_LONGJMP:
2840 /* We can't inline functions that call __builtin_longjmp at
2841 all. The non-local goto machinery really requires the
2842 destination be in a different function. If we allow the
2843 function calling __builtin_longjmp to be inlined into the
2844 function calling __builtin_setjmp, Things will Go Awry. */
2845 inline_forbidden_reason
2846 = G_("function %q+F can never be inlined because "
2847 "it uses setjmp-longjmp exception handling");
2848 *handled_ops_p = true;
2849 return t;
2851 case BUILT_IN_NONLOCAL_GOTO:
2852 /* Similarly. */
2853 inline_forbidden_reason
2854 = G_("function %q+F can never be inlined because "
2855 "it uses non-local goto");
2856 *handled_ops_p = true;
2857 return t;
2859 case BUILT_IN_RETURN:
2860 case BUILT_IN_APPLY_ARGS:
2861 /* If a __builtin_apply_args caller were inlined,
2862 it would be saving the arguments of the function it has
2863 been inlined into. Similarly, __builtin_return would
2864 return from the function into which it has been inlined. */
2865 inline_forbidden_reason
2866 = G_("function %q+F can never be inlined because "
2867 "it uses __builtin_return or __builtin_apply_args");
2868 *handled_ops_p = true;
2869 return t;
2871 default:
2872 break;
2874 break;
2876 case GIMPLE_GOTO:
2877 t = gimple_goto_dest (stmt);
2879 /* We will not inline a function which uses computed goto. The
2880 addresses of its local labels, which may be tucked into
2881 global storage, are of course not constant across
2882 instantiations, which causes unexpected behavior. */
2883 if (TREE_CODE (t) != LABEL_DECL)
2885 inline_forbidden_reason
2886 = G_("function %q+F can never be inlined "
2887 "because it contains a computed goto");
2888 *handled_ops_p = true;
2889 return t;
2891 break;
2893 default:
2894 break;
2897 *handled_ops_p = false;
2898 return NULL_TREE;
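/* An example of what the GIMPLE_GOTO case above rejects (illustrative
   user code):

     void f (void **tab) { goto *tab[0]; }

   The goto destination is not a LABEL_DECL, so f is never inlined.  */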
2901 /* Return true if FNDECL is a function that cannot be inlined into
2902 another one. */
2904 static bool
2905 inline_forbidden_p (tree fndecl)
2907 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
2908 struct walk_stmt_info wi;
2909 struct pointer_set_t *visited_nodes;
2910 basic_block bb;
2911 bool forbidden_p = false;
2913 /* First check for shared reasons not to copy the code. */
2914 inline_forbidden_reason = copy_forbidden (fun, fndecl);
2915 if (inline_forbidden_reason != NULL)
2916 return true;
2918 /* Next, walk the statements of the function looking for
2919 constructs we can't handle or that are non-optimal for inlining. */
2920 visited_nodes = pointer_set_create ();
2921 memset (&wi, 0, sizeof (wi));
2922 wi.info = (void *) fndecl;
2923 wi.pset = visited_nodes;
2925 FOR_EACH_BB_FN (bb, fun)
2927 gimple ret;
2928 gimple_seq seq = bb_seq (bb);
2929 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
2930 forbidden_p = (ret != NULL);
2931 if (forbidden_p)
2932 break;
2935 pointer_set_destroy (visited_nodes);
2936 return forbidden_p;
2939 /* Returns nonzero if FN is a function that does not have any
2940 fundamental inline blocking properties. */
2942 bool
2943 tree_inlinable_function_p (tree fn)
2945 bool inlinable = true;
2946 bool do_warning;
2947 tree always_inline;
2949 /* If we've already decided this function shouldn't be inlined,
2950 there's no need to check again. */
2951 if (DECL_UNINLINABLE (fn))
2952 return false;
2954 /* We only warn for functions declared `inline' by the user. */
2955 do_warning = (warn_inline
2956 && DECL_DECLARED_INLINE_P (fn)
2957 && !DECL_NO_INLINE_WARNING_P (fn)
2958 && !DECL_IN_SYSTEM_HEADER (fn));
2960 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
2962 if (flag_no_inline
2963 && always_inline == NULL)
2965 if (do_warning)
2966 warning (OPT_Winline, "function %q+F can never be inlined because it "
2967 "is suppressed using -fno-inline", fn);
2968 inlinable = false;
2971 /* Don't auto-inline anything that might not be bound within
2972 this unit of translation. */
2973 else if (!DECL_DECLARED_INLINE_P (fn)
2974 && DECL_REPLACEABLE_P (fn))
2975 inlinable = false;
2977 else if (!function_attribute_inlinable_p (fn))
2979 if (do_warning)
2980 warning (OPT_Winline, "function %q+F can never be inlined because it "
2981 "uses attributes conflicting with inlining", fn);
2982 inlinable = false;
2985 else if (inline_forbidden_p (fn))
2987 /* See if we should warn about uninlinable functions. Previously,
2988 some of these warnings would be issued while trying to expand
2989 the function inline, but that would cause multiple warnings
2990 about functions that would, for example, call alloca. But since
2991 this is a property of the function, just one warning is enough.
2992 As a bonus we can now give more details about the reason why a
2993 function is not inlinable. */
2994 if (always_inline)
2995 sorry (inline_forbidden_reason, fn);
2996 else if (do_warning)
2997 warning (OPT_Winline, inline_forbidden_reason, fn);
2999 inlinable = false;
3002 /* Squirrel away the result so that we don't have to check again. */
3003 DECL_UNINLINABLE (fn) = !inlinable;
3005 return inlinable;
3008 /* Estimate the cost of a memory move. Use the machine-dependent
3009 word size and take a possible memcpy call into account. */
3011 static int
3012 estimate_move_cost (tree type)
3014 HOST_WIDE_INT size;
3016 gcc_assert (!VOID_TYPE_P (type));
3018 size = int_size_in_bytes (type);
3020 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3021 /* Cost of a memcpy call, 3 arguments and the call. */
3022 return 4;
3023 else
3024 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
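/* A worked example under assumed target parameters (say MOVE_MAX_PIECES
   is 8 and MOVE_RATIO is 4, giving a 32-byte memcpy threshold): a 64-byte
   struct costs 4, i.e. a memcpy call with 3 arguments plus the call
   itself, while a 24-byte struct costs (24 + 8 - 1) / 8 == 3 piecewise
   word moves.  */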
3027 /* Returns the cost of operation CODE, according to WEIGHTS. */
3029 static int
3030 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3031 tree op1 ATTRIBUTE_UNUSED, tree op2)
3033 switch (code)
3035 /* These are "free" conversions, or their presumed cost
3036 is folded into other operations. */
3037 case RANGE_EXPR:
3038 CASE_CONVERT:
3039 case COMPLEX_EXPR:
3040 case PAREN_EXPR:
3041 return 0;
3043 /* Assign cost of 1 to usual operations.
3044 ??? We may consider mapping RTL costs to this. */
3045 case COND_EXPR:
3046 case VEC_COND_EXPR:
3048 case PLUS_EXPR:
3049 case POINTER_PLUS_EXPR:
3050 case MINUS_EXPR:
3051 case MULT_EXPR:
3053 case ADDR_SPACE_CONVERT_EXPR:
3054 case FIXED_CONVERT_EXPR:
3055 case FIX_TRUNC_EXPR:
3057 case NEGATE_EXPR:
3058 case FLOAT_EXPR:
3059 case MIN_EXPR:
3060 case MAX_EXPR:
3061 case ABS_EXPR:
3063 case LSHIFT_EXPR:
3064 case RSHIFT_EXPR:
3065 case LROTATE_EXPR:
3066 case RROTATE_EXPR:
3067 case VEC_LSHIFT_EXPR:
3068 case VEC_RSHIFT_EXPR:
3070 case BIT_IOR_EXPR:
3071 case BIT_XOR_EXPR:
3072 case BIT_AND_EXPR:
3073 case BIT_NOT_EXPR:
3075 case TRUTH_ANDIF_EXPR:
3076 case TRUTH_ORIF_EXPR:
3077 case TRUTH_AND_EXPR:
3078 case TRUTH_OR_EXPR:
3079 case TRUTH_XOR_EXPR:
3080 case TRUTH_NOT_EXPR:
3082 case LT_EXPR:
3083 case LE_EXPR:
3084 case GT_EXPR:
3085 case GE_EXPR:
3086 case EQ_EXPR:
3087 case NE_EXPR:
3088 case ORDERED_EXPR:
3089 case UNORDERED_EXPR:
3091 case UNLT_EXPR:
3092 case UNLE_EXPR:
3093 case UNGT_EXPR:
3094 case UNGE_EXPR:
3095 case UNEQ_EXPR:
3096 case LTGT_EXPR:
3098 case CONJ_EXPR:
3100 case PREDECREMENT_EXPR:
3101 case PREINCREMENT_EXPR:
3102 case POSTDECREMENT_EXPR:
3103 case POSTINCREMENT_EXPR:
3105 case REALIGN_LOAD_EXPR:
3107 case REDUC_MAX_EXPR:
3108 case REDUC_MIN_EXPR:
3109 case REDUC_PLUS_EXPR:
3110 case WIDEN_SUM_EXPR:
3111 case WIDEN_MULT_EXPR:
3112 case DOT_PROD_EXPR:
3114 case VEC_WIDEN_MULT_HI_EXPR:
3115 case VEC_WIDEN_MULT_LO_EXPR:
3116 case VEC_UNPACK_HI_EXPR:
3117 case VEC_UNPACK_LO_EXPR:
3118 case VEC_UNPACK_FLOAT_HI_EXPR:
3119 case VEC_UNPACK_FLOAT_LO_EXPR:
3120 case VEC_PACK_TRUNC_EXPR:
3121 case VEC_PACK_SAT_EXPR:
3122 case VEC_PACK_FIX_TRUNC_EXPR:
3123 case VEC_EXTRACT_EVEN_EXPR:
3124 case VEC_EXTRACT_ODD_EXPR:
3125 case VEC_INTERLEAVE_HIGH_EXPR:
3126 case VEC_INTERLEAVE_LOW_EXPR:
3128 return 1;
3130 /* A few special cases of expensive operations. This is useful
3131 to avoid inlining functions having too many of these. */
3132 case TRUNC_DIV_EXPR:
3133 case CEIL_DIV_EXPR:
3134 case FLOOR_DIV_EXPR:
3135 case ROUND_DIV_EXPR:
3136 case EXACT_DIV_EXPR:
3137 case TRUNC_MOD_EXPR:
3138 case CEIL_MOD_EXPR:
3139 case FLOOR_MOD_EXPR:
3140 case ROUND_MOD_EXPR:
3141 case RDIV_EXPR:
3142 if (TREE_CODE (op2) != INTEGER_CST)
3143 return weights->div_mod_cost;
3144 return 1;
3146 default:
3147 /* We expect a copy assignment with no operator. */
3148 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3149 return 0;
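/* For instance: "a = b + c" costs 1; "a = b / c" costs
   weights->div_mod_cost; "a = b / 4" costs only 1, on the assumption
   that division by an INTEGER_CST can be strength-reduced later.  */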
3154 /* Estimate number of instructions that will be created by expanding
3155 the statements in the statement sequence STMTS.
3156 WEIGHTS contains weights attributed to various constructs. */
3158 static
3159 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3161 int cost;
3162 gimple_stmt_iterator gsi;
3164 cost = 0;
3165 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3166 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3168 return cost;
3172 /* Estimate number of instructions that will be created by expanding STMT.
3173 WEIGHTS contains weights attributed to various constructs. */
3175 int
3176 estimate_num_insns (gimple stmt, eni_weights *weights)
3178 unsigned cost, i;
3179 enum gimple_code code = gimple_code (stmt);
3180 tree lhs;
3181 tree rhs;
3183 switch (code)
3185 case GIMPLE_ASSIGN:
3186 /* Try to estimate the cost of assignments. We have two cases to
3187 deal with:
3188 1) Simple assignments to registers;
3189 2) Stores to things that must live in memory. This includes
3190 "normal" stores to scalars, but also assignments of large
3191 structures, or constructors of big arrays;
3193 Let us look at the first two cases, assuming we have "a = b + C":
3194 <GIMPLE_ASSIGN <var_decl "a">
3195 <plus_expr <var_decl "b"> <constant C>>
3196 If "a" is a GIMPLE register, the assignment to it is free on almost
3197 any target, because "a" usually ends up in a real register. Hence
3198 the only cost of this expression comes from the PLUS_EXPR, and we
3199 can ignore the GIMPLE_ASSIGN.
3200 If "a" is not a GIMPLE register, the assignment to "a" will most
3201 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3202 of moving something into "a", which we compute using the function
3203 estimate_move_cost. */
3204 lhs = gimple_assign_lhs (stmt);
3205 rhs = gimple_assign_rhs1 (stmt);
3207 if (is_gimple_reg (lhs))
3208 cost = 0;
3209 else
3210 cost = estimate_move_cost (TREE_TYPE (lhs));
3212 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3213 cost += estimate_move_cost (TREE_TYPE (rhs));
3215 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3216 gimple_assign_rhs1 (stmt),
3217 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3218 == GIMPLE_BINARY_RHS
3219 ? gimple_assign_rhs2 (stmt) : NULL);
3220 break;
3222 case GIMPLE_COND:
3223 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3224 gimple_op (stmt, 0),
3225 gimple_op (stmt, 1));
3226 break;
3228 case GIMPLE_SWITCH:
3229 /* Take into account the cost of the switch + guess 2 conditional jumps
3230 for each case label.
3232 TODO: once the switch expansion logic is sufficiently separated, we can
3233 do a better job of estimating the cost of the switch. */
3234 if (weights->time_based)
3235 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3236 else
3237 cost = gimple_switch_num_labels (stmt) * 2;
3238 break;
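/* An example of the estimate above (illustrative): a switch with 16 case
   labels is estimated at floor_log2 (16) * 2 == 8 when the weights are
   time-based (a balanced decision tree) and at 16 * 2 == 32 when
   optimizing for size.  */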
3240 case GIMPLE_CALL:
3242 tree decl = gimple_call_fndecl (stmt);
3243 tree addr = gimple_call_fn (stmt);
3244 tree funtype = TREE_TYPE (addr);
3246 if (POINTER_TYPE_P (funtype))
3247 funtype = TREE_TYPE (funtype);
3249 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3250 cost = weights->target_builtin_call_cost;
3251 else
3252 cost = weights->call_cost;
3254 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3255 switch (DECL_FUNCTION_CODE (decl))
3257 case BUILT_IN_CONSTANT_P:
3258 return 0;
3259 case BUILT_IN_EXPECT:
3260 return 0;
3262 /* Prefetch instruction is not expensive. */
3263 case BUILT_IN_PREFETCH:
3264 cost = weights->target_builtin_call_cost;
3265 break;
3267 /* Exception state returns or moves registers around. */
3268 case BUILT_IN_EH_FILTER:
3269 case BUILT_IN_EH_POINTER:
3270 case BUILT_IN_EH_COPY_VALUES:
3271 return 0;
3273 default:
3274 break;
3277 if (decl)
3278 funtype = TREE_TYPE (decl);
3280 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3281 cost += estimate_move_cost (TREE_TYPE (funtype));
3282 /* Our cost must be kept in sync with
3283 cgraph_estimate_size_after_inlining, which uses the function
3284 declaration to figure out the arguments. */
3285 if (decl && DECL_ARGUMENTS (decl))
3287 tree arg;
3288 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3289 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3290 cost += estimate_move_cost (TREE_TYPE (arg));
3292 else if (funtype && prototype_p (funtype))
3294 tree t;
3295 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3296 t = TREE_CHAIN (t))
3297 if (!VOID_TYPE_P (TREE_VALUE (t)))
3298 cost += estimate_move_cost (TREE_VALUE (t));
3300 else
3302 for (i = 0; i < gimple_call_num_args (stmt); i++)
3304 tree arg = gimple_call_arg (stmt, i);
3305 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3306 cost += estimate_move_cost (TREE_TYPE (arg));
3310 break;
3313 case GIMPLE_GOTO:
3314 case GIMPLE_LABEL:
3315 case GIMPLE_NOP:
3316 case GIMPLE_PHI:
3317 case GIMPLE_RETURN:
3318 case GIMPLE_PREDICT:
3319 case GIMPLE_DEBUG:
3320 return 0;
3322 case GIMPLE_ASM:
3323 return asm_str_count (gimple_asm_string (stmt));
3325 case GIMPLE_RESX:
3326 /* This is either going to be an external function call with one
3327 argument, or two register copy statements plus a goto. */
3328 return 2;
3330 case GIMPLE_EH_DISPATCH:
3331 /* ??? This is going to turn into a switch statement. Ideally
3332 we'd have a look at the eh region and estimate the number of
3333 edges involved. */
3334 return 10;
3336 case GIMPLE_BIND:
3337 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3339 case GIMPLE_EH_FILTER:
3340 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3342 case GIMPLE_CATCH:
3343 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3345 case GIMPLE_TRY:
3346 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3347 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3349 /* OpenMP directives are generally very expensive. */
3351 case GIMPLE_OMP_RETURN:
3352 case GIMPLE_OMP_SECTIONS_SWITCH:
3353 case GIMPLE_OMP_ATOMIC_STORE:
3354 case GIMPLE_OMP_CONTINUE:
3355 /* ...except these, which are cheap. */
3356 return 0;
3358 case GIMPLE_OMP_ATOMIC_LOAD:
3359 return weights->omp_cost;
3361 case GIMPLE_OMP_FOR:
3362 return (weights->omp_cost
3363 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3364 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3366 case GIMPLE_OMP_PARALLEL:
3367 case GIMPLE_OMP_TASK:
3368 case GIMPLE_OMP_CRITICAL:
3369 case GIMPLE_OMP_MASTER:
3370 case GIMPLE_OMP_ORDERED:
3371 case GIMPLE_OMP_SECTION:
3372 case GIMPLE_OMP_SECTIONS:
3373 case GIMPLE_OMP_SINGLE:
3374 return (weights->omp_cost
3375 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3377 default:
3378 gcc_unreachable ();
3381 return cost;
3384 /* Estimate number of instructions that will be created by expanding
3385 function FNDECL. WEIGHTS contains weights attributed to various
3386 constructs. */
3388 int
3389 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3391 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3392 gimple_stmt_iterator bsi;
3393 basic_block bb;
3394 int n = 0;
3396 gcc_assert (my_function && my_function->cfg);
3397 FOR_EACH_BB_FN (bb, my_function)
3399 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3400 n += estimate_num_insns (gsi_stmt (bsi), weights);
3403 return n;
3407 /* Initializes weights used by estimate_num_insns. */
3409 void
3410 init_inline_once (void)
3412 eni_size_weights.call_cost = 1;
3413 eni_size_weights.target_builtin_call_cost = 1;
3414 eni_size_weights.div_mod_cost = 1;
3415 eni_size_weights.omp_cost = 40;
3416 eni_size_weights.time_based = false;
3418 /* Estimating the time for a call is difficult, since we have no idea what the
3419 called function does. In the current uses of eni_time_weights,
3420 underestimating the cost does less harm than overestimating it, so
3421 we choose a rather small value here. */
3422 eni_time_weights.call_cost = 10;
3423 eni_time_weights.target_builtin_call_cost = 10;
3424 eni_time_weights.div_mod_cost = 10;
3425 eni_time_weights.omp_cost = 40;
3426 eni_time_weights.time_based = true;
3429 /* Estimate the number of instructions in a gimple_seq. */
3431 int
3432 count_insns_seq (gimple_seq seq, eni_weights *weights)
3434 gimple_stmt_iterator gsi;
3435 int n = 0;
3436 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3437 n += estimate_num_insns (gsi_stmt (gsi), weights);
3439 return n;
3443 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3445 static void
3446 prepend_lexical_block (tree current_block, tree new_block)
3448 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3449 BLOCK_SUBBLOCKS (current_block) = new_block;
3450 BLOCK_SUPERCONTEXT (new_block) = current_block;
3453 /* Fetch the callee declaration from the call graph edge going from NODE and
3454 associated with call statement STMT. Return NULL_TREE if not found. */
3455 static tree
3456 get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3458 struct cgraph_edge *cs;
3460 cs = cgraph_edge (node, stmt);
3461 if (cs)
3462 return cs->callee->decl;
3464 return NULL_TREE;
3467 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3469 static bool
3470 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3472 tree use_retvar;
3473 tree fn;
3474 struct pointer_map_t *st, *dst;
3475 tree return_slot;
3476 tree modify_dest;
3477 location_t saved_location;
3478 struct cgraph_edge *cg_edge;
3479 cgraph_inline_failed_t reason;
3480 basic_block return_block;
3481 edge e;
3482 gimple_stmt_iterator gsi, stmt_gsi;
3483 bool successfully_inlined = FALSE;
3484 bool purge_dead_abnormal_edges;
3485 tree t_step;
3486 tree var;
3488 /* Set input_location here so we get the right instantiation context
3489 if we call instantiate_decl from inlinable_function_p. */
3490 saved_location = input_location;
3491 if (gimple_has_location (stmt))
3492 input_location = gimple_location (stmt);
3494 /* From here on, we're only interested in CALL_EXPRs. */
3495 if (gimple_code (stmt) != GIMPLE_CALL)
3496 goto egress;
3498 /* First, see if we can figure out what function is being called.
3499 If we cannot, then there is no hope of inlining the function. */
3500 fn = gimple_call_fndecl (stmt);
3501 if (!fn)
3503 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3504 if (!fn)
3505 goto egress;
3508 /* Turn forward declarations into real ones. */
3509 fn = cgraph_node (fn)->decl;
3511 /* If FN is a declaration of a function in a nested scope that was
3512 globally declared inline, we don't set its DECL_INITIAL.
3513 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3514 C++ front-end uses it for cdtors to refer to their internal
3515 declarations, that are not real functions. Fortunately those
3516 don't have trees to be saved, so we can tell by checking their
3517 gimple_body. */
3518 if (!DECL_INITIAL (fn)
3519 && DECL_ABSTRACT_ORIGIN (fn)
3520 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3521 fn = DECL_ABSTRACT_ORIGIN (fn);
3523 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
3524 Remove this check once that is fixed. */
3525 if (!id->dst_node->analyzed)
3526 goto egress;
3528 cg_edge = cgraph_edge (id->dst_node, stmt);
3530 /* Don't inline functions with different EH personalities. */
3531 if (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3532 && DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)
3533 && (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3534 != DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)))
3535 goto egress;
3537 /* Don't try to inline functions that are not well-suited to
3538 inlining. */
3539 if (!cgraph_inline_p (cg_edge, &reason))
3541 /* If this call was originally indirect, we do not want to emit any
3542 inlining related warnings or sorry messages because there are no
3543 guarantees regarding those. */
3544 if (cg_edge->indirect_call)
3545 goto egress;
3547 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3548 /* Avoid warnings during early inline pass. */
3549 && cgraph_global_info_ready)
3551 sorry ("inlining failed in call to %q+F: %s", fn,
3552 cgraph_inline_failed_string (reason));
3553 sorry ("called from here");
3555 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3556 && !DECL_IN_SYSTEM_HEADER (fn)
3557 && reason != CIF_UNSPECIFIED
3558 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3559 /* Avoid warnings during early inline pass. */
3560 && cgraph_global_info_ready)
3562 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3563 fn, cgraph_inline_failed_string (reason));
3564 warning (OPT_Winline, "called from here");
3566 goto egress;
3568 fn = cg_edge->callee->decl;
3570 #ifdef ENABLE_CHECKING
3571 if (cg_edge->callee->decl != id->dst_node->decl)
3572 verify_cgraph_node (cg_edge->callee);
3573 #endif
3575 /* We will be inlining this callee. */
3576 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3578 /* Update the caller's EH personality. */
3579 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3580 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3581 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3583 /* Split the block holding the GIMPLE_CALL. */
3584 e = split_block (bb, stmt);
3585 bb = e->src;
3586 return_block = e->dest;
3587 remove_edge (e);
3589 /* split_block splits after the statement; work around this by
3590 moving the call into the second block manually. Not pretty,
3591 but seems easier than doing the CFG manipulation by hand
3592 when the GIMPLE_CALL is in the last statement of BB. */
3593 stmt_gsi = gsi_last_bb (bb);
3594 gsi_remove (&stmt_gsi, false);
3596 /* If the GIMPLE_CALL was the last statement of BB, it may have
3597 been the source of abnormal edges. In this case, schedule
3598 the removal of dead abnormal edges. */
3599 gsi = gsi_start_bb (return_block);
3600 if (gsi_end_p (gsi))
3602 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3603 purge_dead_abnormal_edges = true;
3605 else
3607 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3608 purge_dead_abnormal_edges = false;
3611 stmt_gsi = gsi_start_bb (return_block);
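/* An illustrative sketch of the CFG surgery above (hypothetical blocks):

     before:  BB:           [ ... ; a_1 = foo (...) ; rest... ]
     after:   BB:           [ ... ]   <-- inlined body goes between
              RETURN_BLOCK: [ a_1 = foo (...) ; rest... ]

   The call is moved to the head of RETURN_BLOCK so that, once the body
   has been copied, it can be replaced by an assignment from the return
   variable or removed.  */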
3613 /* Build a block containing code to initialize the arguments, the
3614 actual inline expansion of the body, and a label for the return
3615 statements within the function to jump to. The type of the
3616 statement expression is the return type of the function call. */
3617 id->block = make_node (BLOCK);
3618 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3619 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3620 prepend_lexical_block (gimple_block (stmt), id->block);
3622 /* Local declarations will be replaced by their equivalents in this
3623 map. */
3624 st = id->decl_map;
3625 id->decl_map = pointer_map_create ();
3626 dst = id->debug_map;
3627 id->debug_map = NULL;
3629 /* Record the function we are about to inline. */
3630 id->src_fn = fn;
3631 id->src_node = cg_edge->callee;
3632 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3633 id->gimple_call = stmt;
3635 gcc_assert (!id->src_cfun->after_inlining);
3637 id->entry_bb = bb;
3638 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3640 gimple_stmt_iterator si = gsi_last_bb (bb);
3641 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3642 NOT_TAKEN),
3643 GSI_NEW_STMT);
3645 initialize_inlined_parameters (id, stmt, fn, bb);
3647 if (DECL_INITIAL (fn))
3648 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3650 /* Return statements in the function body will be replaced by jumps
3651 to the return block. */
3652 gcc_assert (DECL_INITIAL (fn));
3653 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3655 /* Find the LHS to which the result of this call is assigned. */
3656 return_slot = NULL;
3657 if (gimple_call_lhs (stmt))
3659 modify_dest = gimple_call_lhs (stmt);
3661 /* The function which we are inlining might not return a value,
3662 in which case we should issue a warning that the function
3663 does not return a value. In that case the optimizers will
3664 see that the variable to which the value is assigned was not
3665 initialized. We do not want to issue a warning about that
3666 uninitialized variable. */
3667 if (DECL_P (modify_dest))
3668 TREE_NO_WARNING (modify_dest) = 1;
3670 if (gimple_call_return_slot_opt_p (stmt))
3672 return_slot = modify_dest;
3673 modify_dest = NULL;
3676 else
3677 modify_dest = NULL;
3679 /* If we are inlining a call to the C++ operator new, we don't want
3680 to use type based alias analysis on the return value. Otherwise
3681 we may get confused if the compiler sees that the inlined new
3682 function returns a pointer which was just deleted. See bug
3683 33407. */
3684 if (DECL_IS_OPERATOR_NEW (fn))
3686 return_slot = NULL;
3687 modify_dest = NULL;
3690 /* Declare the return variable for the function. */
3691 use_retvar = declare_return_variable (id, return_slot, modify_dest);
3693 /* Add the local vars of this inlined callee to the caller. */
3694 t_step = id->src_cfun->local_decls;
3695 for (; t_step; t_step = TREE_CHAIN (t_step))
3697 var = TREE_VALUE (t_step);
3698 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3700 if (var_ann (var) && add_referenced_var (var))
3701 cfun->local_decls = tree_cons (NULL_TREE, var,
3702 cfun->local_decls);
3704 else if (!can_be_nonlocal (var, id))
3705 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3706 cfun->local_decls);
3709 if (dump_file && (dump_flags & TDF_DETAILS))
3711 fprintf (dump_file, "Inlining ");
3712 print_generic_expr (dump_file, id->src_fn, 0);
3713 fprintf (dump_file, " to ");
3714 print_generic_expr (dump_file, id->dst_fn, 0);
3715 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3718 /* This is it. Duplicate the callee body. Assume callee is
3719 pre-gimplified. Note that we must not alter the caller
3720 function in any way before this point, as this CALL_EXPR may be
3721 a self-referential call; if we're calling ourselves, we need to
3722 duplicate our body before altering anything. */
3723 copy_body (id, bb->count,
3724 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3725 bb, return_block);
3727 /* Reset the escaped and callused solutions. */
3728 if (cfun->gimple_df)
3730 pt_solution_reset (&cfun->gimple_df->escaped);
3731 pt_solution_reset (&cfun->gimple_df->callused);
3734 /* Clean up. */
3735 if (id->debug_map)
3737 pointer_map_destroy (id->debug_map);
3738 id->debug_map = dst;
3740 pointer_map_destroy (id->decl_map);
3741 id->decl_map = st;
3743 /* Unlink the call's virtual operands before replacing it. */
3744 unlink_stmt_vdef (stmt);
3746 /* If the inlined function returns a result that we care about,
3747 substitute the GIMPLE_CALL with an assignment of the return
3748 variable to the LHS of the call. That is, if STMT was
3749 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3750 if (use_retvar && gimple_call_lhs (stmt))
3752 gimple old_stmt = stmt;
3753 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3754 gsi_replace (&stmt_gsi, stmt, false);
3755 if (gimple_in_ssa_p (cfun))
3756 mark_symbols_for_renaming (stmt);
3757 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3759 else
3761 /* Handle the case of inlining a function with no return
3762 statement, which causes the return value to become undefined. */
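/* Illustrative sketch (hypothetical GIMPLE, not from this file):
   inlining

     int foo (void) { }

   at 'x_1 = foo ();' leaves x_1 with no defining statement.  The
   code below either copies the variable's existing default
   definition into x_1, or makes x_1 itself the default
   definition. */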
3763 if (gimple_call_lhs (stmt)
3764 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3766 tree name = gimple_call_lhs (stmt);
3767 tree var = SSA_NAME_VAR (name);
3768 tree def = gimple_default_def (cfun, var);
3770 if (def)
3772 /* If the variable is used undefined, make this name
3773 undefined via a move. */
3774 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3775 gsi_replace (&stmt_gsi, stmt, true);
3777 else
3779 /* Otherwise make this variable undefined. */
3780 gsi_remove (&stmt_gsi, true);
3781 set_default_def (var, name);
3782 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3785 else
3786 gsi_remove (&stmt_gsi, true);
3789 if (purge_dead_abnormal_edges)
3790 gimple_purge_dead_abnormal_call_edges (return_block);
3792 /* If the value of the new expression is ignored, that's OK. We
3793 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3794 the equivalent inlined version either. */
3795 if (is_gimple_assign (stmt))
3797 gcc_assert (gimple_assign_single_p (stmt)
3798 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
3799 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3802 /* Output the inlining info for this abstract function, since it has been
3803 inlined. If we don't do this now, we can lose the information about the
3804 variables in the function when the blocks get blown away as soon as we
3805 remove the cgraph node. */
3806 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
3808 /* Update callgraph if needed. */
3809 cgraph_remove_node (cg_edge->callee);
3811 id->block = NULL_TREE;
3812 successfully_inlined = TRUE;
3814 egress:
3815 input_location = saved_location;
3816 return successfully_inlined;
3819 /* Expand call statements found in basic block BB.
3820 We can only have CALL_EXPRs as the "toplevel" tree code or nested
3821 in a MODIFY_EXPR. See tree-gimple.c:get_call_expr_in(). Unfortunately
3822 we cannot use that function here because we need a pointer
3823 to the CALL_EXPR, not the tree itself. */
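/* For illustration, the two statement shapes this walk can consider
   (assumed C-level examples):

     foo (x);        call with ignored result
     y = foo (x);    call whose LHS receives the result

   In GIMPLE a call appears only in these positions, never buried
   deeper inside an expression. */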
3825 static bool
3826 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
3828 gimple_stmt_iterator gsi;
3830 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3832 gimple stmt = gsi_stmt (gsi);
3834 if (is_gimple_call (stmt)
3835 && expand_call_inline (bb, stmt, id))
3836 return true;
3839 return false;
3843 /* Walk all basic blocks created after FIRST and try to fold every statement
3844 in the STATEMENTS pointer set. */
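/* A hedged example of why builtins get special treatment below: folding

     x = __builtin_strlen ("abc");

   collapses to the single statement 'x = 3', but folding other builtin
   calls may emit several replacement statements, so the code re-scans
   every statement from the position just before GSI. */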
3846 static void
3847 fold_marked_statements (int first, struct pointer_set_t *statements)
3849 for (; first < n_basic_blocks; first++)
3850 if (BASIC_BLOCK (first))
3852 gimple_stmt_iterator gsi;
3854 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
3855 !gsi_end_p (gsi);
3856 gsi_next (&gsi))
3857 if (pointer_set_contains (statements, gsi_stmt (gsi)))
3859 gimple old_stmt = gsi_stmt (gsi);
3860 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
3862 if (old_decl && DECL_BUILT_IN (old_decl))
3864 /* Folding builtins can create multiple instructions,
3865 we need to look at all of them. */
3866 gimple_stmt_iterator i2 = gsi;
3867 gsi_prev (&i2);
3868 if (fold_stmt (&gsi))
3870 gimple new_stmt;
3871 if (gsi_end_p (i2))
3872 i2 = gsi_start_bb (BASIC_BLOCK (first));
3873 else
3874 gsi_next (&i2);
3875 while (1)
3877 new_stmt = gsi_stmt (i2);
3878 update_stmt (new_stmt);
3879 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3880 new_stmt);
3882 if (new_stmt == gsi_stmt (gsi))
3884 /* It is okay to check only for the very last
3885 of these statements. If it is a throwing
3886 statement nothing will change. If it isn't
3887 this can remove EH edges. The only way this
3888 could go wrong is if some intermediate stmts
3889 could throw while the last one does not; that
3890 would mean we'd have to split the block, which
3891 we can't do here and we'd lose anyway. And as
3892 builtins probably never throw, this all
3893 is moot anyway. */
3894 if (maybe_clean_or_replace_eh_stmt (old_stmt,
3895 new_stmt))
3896 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3897 break;
3899 gsi_next (&i2);
3903 else if (fold_stmt (&gsi))
3905 /* Re-read the statement from GSI as fold_stmt() may
3906 have changed it. */
3907 gimple new_stmt = gsi_stmt (gsi);
3908 update_stmt (new_stmt);
3910 if (is_gimple_call (old_stmt)
3911 || is_gimple_call (new_stmt))
3912 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3913 new_stmt);
3915 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
3916 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3922 /* Return true if BB has at least one abnormal outgoing edge. */
3924 static inline bool
3925 has_abnormal_outgoing_edge_p (basic_block bb)
3927 edge e;
3928 edge_iterator ei;
3930 FOR_EACH_EDGE (e, ei, bb->succs)
3931 if (e->flags & EDGE_ABNORMAL)
3932 return true;
3934 return false;
3937 /* Expand calls to inline functions in the body of FN. */
3939 unsigned int
3940 optimize_inline_calls (tree fn)
3942 copy_body_data id;
3943 basic_block bb;
3944 int last = n_basic_blocks;
3945 struct gimplify_ctx gctx;
3947 /* There is no point in performing inlining if errors have already
3948 occurred -- and we might crash if we try to inline invalid
3949 code. */
3950 if (errorcount || sorrycount)
3951 return 0;
3953 /* Clear out ID. */
3954 memset (&id, 0, sizeof (id));
3956 id.src_node = id.dst_node = cgraph_node (fn);
3957 id.dst_fn = fn;
3958 /* Or any functions that aren't finished yet. */
3959 if (current_function_decl)
3960 id.dst_fn = current_function_decl;
3962 id.copy_decl = copy_decl_maybe_to_var;
3963 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3964 id.transform_new_cfg = false;
3965 id.transform_return_to_modify = true;
3966 id.transform_lang_insert_block = NULL;
3967 id.statements_to_fold = pointer_set_create ();
3969 push_gimplify_context (&gctx);
3971 /* We make no attempts to keep dominance info up-to-date. */
3972 free_dominance_info (CDI_DOMINATORS);
3973 free_dominance_info (CDI_POST_DOMINATORS);
3975 /* Register specific gimple functions. */
3976 gimple_register_cfg_hooks ();
3978 /* Reach the trees by walking over the CFG, and note the
3979 enclosing basic-blocks in the call edges. */
3980 /* We walk the blocks going forward, because inlined function bodies
3981 will split id->current_basic_block, and the new blocks will
3982 follow it; we'll trudge through them, processing their CALL_EXPRs
3983 along the way. */
3984 FOR_EACH_BB (bb)
3985 gimple_expand_calls_inline (bb, &id);
3987 pop_gimplify_context (NULL);
3989 #ifdef ENABLE_CHECKING
3991 struct cgraph_edge *e;
3993 verify_cgraph_node (id.dst_node);
3995 /* Double check that we inlined everything we are supposed to inline. */
3996 for (e = id.dst_node->callees; e; e = e->next_callee)
3997 gcc_assert (e->inline_failed);
3999 #endif
4001 /* Fold the statements before compacting/renumbering the basic blocks. */
4002 fold_marked_statements (last, id.statements_to_fold);
4003 pointer_set_destroy (id.statements_to_fold);
4005 gcc_assert (!id.debug_stmts);
4007 /* Renumber the (code) basic_blocks consecutively. */
4008 compact_blocks ();
4009 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4010 number_blocks (fn);
4012 fold_cond_expr_cond ();
4013 delete_unreachable_blocks_update_callgraph (&id);
4014 #ifdef ENABLE_CHECKING
4015 verify_cgraph_node (id.dst_node);
4016 #endif
4018 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4019 not possible yet - the IPA passes might make various functions not
4020 throw and they don't care to proactively update local EH info. This is
4021 done later in the fixup_cfg pass, which also executes the verification. */
4022 return (TODO_update_ssa
4023 | TODO_cleanup_cfg
4024 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4025 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4028 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
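/* Typical usage is as a walk_tree callback (illustrative sketch):

     walk_tree (&expr, copy_tree_r, NULL, NULL);

   which deep-copies EXPR in place while sharing types, declarations
   and constants, since those code classes stop the walk below. */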
4030 tree
4031 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4033 enum tree_code code = TREE_CODE (*tp);
4034 enum tree_code_class cl = TREE_CODE_CLASS (code);
4036 /* We make copies of most nodes. */
4037 if (IS_EXPR_CODE_CLASS (cl)
4038 || code == TREE_LIST
4039 || code == TREE_VEC
4040 || code == TYPE_DECL
4041 || code == OMP_CLAUSE)
4043 /* Because the chain gets clobbered when we make a copy, we save it
4044 here. */
4045 tree chain = NULL_TREE, new_tree;
4047 chain = TREE_CHAIN (*tp);
4049 /* Copy the node. */
4050 new_tree = copy_node (*tp);
4052 /* Propagate mudflap marked-ness. */
4053 if (flag_mudflap && mf_marked_p (*tp))
4054 mf_mark (new_tree);
4056 *tp = new_tree;
4058 /* Now, restore the chain, if appropriate. That will cause
4059 walk_tree to walk into the chain as well. */
4060 if (code == PARM_DECL
4061 || code == TREE_LIST
4062 || code == OMP_CLAUSE)
4063 TREE_CHAIN (*tp) = chain;
4065 /* For now, we don't update BLOCKs when we make copies. So, we
4066 have to nullify all BIND_EXPRs. */
4067 if (TREE_CODE (*tp) == BIND_EXPR)
4068 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4070 else if (code == CONSTRUCTOR)
4072 /* CONSTRUCTOR nodes need special handling because
4073 we need to duplicate the vector of elements. */
4074 tree new_tree;
4076 new_tree = copy_node (*tp);
4078 /* Propagate mudflap marked-ness. */
4079 if (flag_mudflap && mf_marked_p (*tp))
4080 mf_mark (new_tree);
4082 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4083 CONSTRUCTOR_ELTS (*tp));
4084 *tp = new_tree;
4086 else if (TREE_CODE_CLASS (code) == tcc_type)
4087 *walk_subtrees = 0;
4088 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4089 *walk_subtrees = 0;
4090 else if (TREE_CODE_CLASS (code) == tcc_constant)
4091 *walk_subtrees = 0;
4092 else
4093 gcc_assert (code != STATEMENT_LIST);
4094 return NULL_TREE;
4097 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4098 information indicating to what new SAVE_EXPR this one should be mapped,
4099 use that one. Otherwise, create a new node and enter it in ST. FN is
4100 the function into which the copy will be placed. */
4102 static void
4103 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4105 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4106 tree *n;
4107 tree t;
4109 /* See if we already encountered this SAVE_EXPR. */
4110 n = (tree *) pointer_map_contains (st, *tp);
4112 /* If we didn't already remap this SAVE_EXPR, do so now. */
4113 if (!n)
4115 t = copy_node (*tp);
4117 /* Remember this SAVE_EXPR. */
4118 *pointer_map_insert (st, *tp) = t;
4119 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4120 *pointer_map_insert (st, t) = t;
4122 else
4124 /* We've already walked into this SAVE_EXPR; don't do it again. */
4125 *walk_subtrees = 0;
4126 t = *n;
4129 /* Replace this SAVE_EXPR with the copy. */
4130 *tp = t;
4133 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
4134 copies the declaration and enters it in the decl map in DATA (which is
4135 really a `copy_body_data *'). */
4137 static tree
4138 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4139 void *data)
4141 copy_body_data *id = (copy_body_data *) data;
4143 /* Don't walk into types. */
4144 if (TYPE_P (*tp))
4145 *walk_subtrees = 0;
4147 else if (TREE_CODE (*tp) == LABEL_EXPR)
4149 tree decl = TREE_OPERAND (*tp, 0);
4151 /* Copy the decl and remember the copy. */
4152 insert_decl_map (id, decl, id->copy_decl (decl, id));
4155 return NULL_TREE;
4158 /* Perform any modifications to EXPR required when it is unsaved. Does
4159 not recurse into EXPR's subtrees. */
4161 static void
4162 unsave_expr_1 (tree expr)
4164 switch (TREE_CODE (expr))
4166 case TARGET_EXPR:
4167 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4168 It's OK for this to happen if it was part of a subtree that
4169 isn't immediately expanded, such as operand 2 of another
4170 TARGET_EXPR. */
4171 if (TREE_OPERAND (expr, 1))
4172 break;
4174 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4175 TREE_OPERAND (expr, 3) = NULL_TREE;
4176 break;
4178 default:
4179 break;
4183 /* Called via walk_tree when an expression is unsaved. Using the
4184 decl map pointed to by ST (really a `struct pointer_map_t *'),
4185 remaps all local declarations to appropriate replacements. */
4187 static tree
4188 unsave_r (tree *tp, int *walk_subtrees, void *data)
4190 copy_body_data *id = (copy_body_data *) data;
4191 struct pointer_map_t *st = id->decl_map;
4192 tree *n;
4194 /* Only a local declaration (variable or label). */
4195 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4196 || TREE_CODE (*tp) == LABEL_DECL)
4198 /* Lookup the declaration. */
4199 n = (tree *) pointer_map_contains (st, *tp);
4201 /* If it's there, remap it. */
4202 if (n)
4203 *tp = *n;
4206 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4207 gcc_unreachable ();
4208 else if (TREE_CODE (*tp) == BIND_EXPR)
4209 copy_bind_expr (tp, walk_subtrees, id);
4210 else if (TREE_CODE (*tp) == SAVE_EXPR
4211 || TREE_CODE (*tp) == TARGET_EXPR)
4212 remap_save_expr (tp, st, walk_subtrees);
4213 else
4215 copy_tree_r (tp, walk_subtrees, NULL);
4217 /* Do whatever unsaving is required. */
4218 unsave_expr_1 (*tp);
4221 /* Keep iterating. */
4222 return NULL_TREE;
4225 /* Copies everything in EXPR and replaces variables, labels
4226 and SAVE_EXPRs local to EXPR. */
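/* For instance (assumed example), unsaving a tree containing
   SAVE_EXPR <a + b> yields a copy whose SAVE_EXPR is a fresh node, so
   evaluating the copy does not reuse the value cached by the
   original. */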
4228 tree
4229 unsave_expr_now (tree expr)
4231 copy_body_data id;
4233 /* There's nothing to do for NULL_TREE. */
4234 if (expr == 0)
4235 return expr;
4237 /* Set up ID. */
4238 memset (&id, 0, sizeof (id));
4239 id.src_fn = current_function_decl;
4240 id.dst_fn = current_function_decl;
4241 id.decl_map = pointer_map_create ();
4242 id.debug_map = NULL;
4244 id.copy_decl = copy_decl_no_change;
4245 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4246 id.transform_new_cfg = false;
4247 id.transform_return_to_modify = false;
4248 id.transform_lang_insert_block = NULL;
4250 /* Walk the tree once to find local labels. */
4251 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4253 /* Walk the tree again, copying, remapping, and unsaving. */
4254 walk_tree (&expr, unsave_r, &id, NULL);
4256 /* Clean up. */
4257 pointer_map_destroy (id.decl_map);
4258 if (id.debug_map)
4259 pointer_map_destroy (id.debug_map);
4261 return expr;
4264 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4265 label, copies the declaration and enters it in the decl map in DATA (which
4266 is really a 'copy_body_data *'). */
4268 static tree
4269 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4270 bool *handled_ops_p ATTRIBUTE_UNUSED,
4271 struct walk_stmt_info *wi)
4273 copy_body_data *id = (copy_body_data *) wi->info;
4274 gimple stmt = gsi_stmt (*gsip);
4276 if (gimple_code (stmt) == GIMPLE_LABEL)
4278 tree decl = gimple_label_label (stmt);
4280 /* Copy the decl and remember the copy. */
4281 insert_decl_map (id, decl, id->copy_decl (decl, id));
4284 return NULL_TREE;
4288 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4289 Using the decl map pointed to by ST (really a `struct pointer_map_t *'),
4290 remaps all local declarations to appropriate replacements in gimple
4291 operands. */
4293 static tree
4294 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4296 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4297 copy_body_data *id = (copy_body_data *) wi->info;
4298 struct pointer_map_t *st = id->decl_map;
4299 tree *n;
4300 tree expr = *tp;
4302 /* Only a local declaration (variable or label). */
4303 if ((TREE_CODE (expr) == VAR_DECL
4304 && !TREE_STATIC (expr))
4305 || TREE_CODE (expr) == LABEL_DECL)
4307 /* Lookup the declaration. */
4308 n = (tree *) pointer_map_contains (st, expr);
4310 /* If it's there, remap it. */
4311 if (n)
4312 *tp = *n;
4313 *walk_subtrees = 0;
4315 else if (TREE_CODE (expr) == STATEMENT_LIST
4316 || TREE_CODE (expr) == BIND_EXPR
4317 || TREE_CODE (expr) == SAVE_EXPR)
4318 gcc_unreachable ();
4319 else if (TREE_CODE (expr) == TARGET_EXPR)
4321 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4322 It's OK for this to happen if it was part of a subtree that
4323 isn't immediately expanded, such as operand 2 of another
4324 TARGET_EXPR. */
4325 if (!TREE_OPERAND (expr, 1))
4327 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4328 TREE_OPERAND (expr, 3) = NULL_TREE;
4332 /* Keep iterating. */
4333 return NULL_TREE;
4337 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4338 Using the decl map pointed to by ST (really a `struct pointer_map_t *'),
4339 remaps all local declarations to appropriate replacements in gimple
4340 statements. */
4342 static tree
4343 replace_locals_stmt (gimple_stmt_iterator *gsip,
4344 bool *handled_ops_p ATTRIBUTE_UNUSED,
4345 struct walk_stmt_info *wi)
4347 copy_body_data *id = (copy_body_data *) wi->info;
4348 gimple stmt = gsi_stmt (*gsip);
4350 if (gimple_code (stmt) == GIMPLE_BIND)
4352 tree block = gimple_bind_block (stmt);
4354 if (block)
4356 remap_block (&block, id);
4357 gimple_bind_set_block (stmt, block);
4360 /* This will remap a lot of the same decls again, but this should be
4361 harmless. */
4362 if (gimple_bind_vars (stmt))
4363 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4366 /* Keep iterating. */
4367 return NULL_TREE;
4371 /* Copies everything in SEQ and replaces variables and labels local to
4372 current_function_decl. */
4374 gimple_seq
4375 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4377 copy_body_data id;
4378 struct walk_stmt_info wi;
4379 struct pointer_set_t *visited;
4380 gimple_seq copy;
4383 /* There's nothing to do for an empty sequence. */
4383 if (seq == NULL)
4384 return seq;
4386 /* Set up ID. */
4387 memset (&id, 0, sizeof (id));
4388 id.src_fn = current_function_decl;
4389 id.dst_fn = current_function_decl;
4390 id.decl_map = pointer_map_create ();
4391 id.debug_map = NULL;
4393 id.copy_decl = copy_decl_no_change;
4394 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4395 id.transform_new_cfg = false;
4396 id.transform_return_to_modify = false;
4397 id.transform_lang_insert_block = NULL;
4399 /* Walk the tree once to find local labels. */
4400 memset (&wi, 0, sizeof (wi));
4401 visited = pointer_set_create ();
4402 wi.info = &id;
4403 wi.pset = visited;
4404 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4405 pointer_set_destroy (visited);
4407 copy = gimple_seq_copy (seq);
4409 /* Walk the copy, remapping decls. */
4410 memset (&wi, 0, sizeof (wi));
4411 wi.info = &id;
4412 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4414 /* Clean up. */
4415 pointer_map_destroy (id.decl_map);
4416 if (id.debug_map)
4417 pointer_map_destroy (id.debug_map);
4419 return copy;
4423 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
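/* Assumed debugger usage:

     (gdb) call debug_find_tree (top, search)

   which returns nonzero iff SEARCH appears somewhere inside TOP. */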
4425 static tree
4426 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4428 if (*tp == data)
4429 return (tree) data;
4430 else
4431 return NULL;
4434 bool
4435 debug_find_tree (tree top, tree search)
4437 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4441 /* Declare the variables created by the inliner. Add all the variables in
4442 VARS to BIND_EXPR. */
4444 static void
4445 declare_inline_vars (tree block, tree vars)
4447 tree t;
4448 for (t = vars; t; t = TREE_CHAIN (t))
4450 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4451 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4452 cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
4455 if (block)
4456 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4459 /* Finish up the copy COPY of DECL: shared bookkeeping for the
4460 copy_decl_* helpers below. DECL originally lived in ID->src_fn;
4461 COPY will be placed in ID->dst_fn. */
4463 static tree
4464 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4466 /* Don't generate debug information for the copy if we wouldn't have
4467 generated it for the original. */
4468 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4469 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4471 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4472 declaration inspired this copy. */
4473 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4475 /* The new variable/label has no RTL, yet. */
4476 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4477 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4478 SET_DECL_RTL (copy, NULL_RTX);
4480 /* These args would always appear unused, if not for this. */
4481 TREE_USED (copy) = 1;
4483 /* Set the context for the new declaration. */
4484 if (!DECL_CONTEXT (decl))
4485 /* Globals stay global. */
4487 else if (DECL_CONTEXT (decl) != id->src_fn)
4488 /* Things that weren't in the scope of the function we're inlining
4489 from aren't in the scope we're inlining to, either. */
4491 else if (TREE_STATIC (decl))
4492 /* Function-scoped static variables should stay in the original
4493 function. */
4495 else
4496 /* Ordinary automatic local variables are now in the scope of the
4497 new function. */
4498 DECL_CONTEXT (copy) = id->dst_fn;
4500 return copy;
4503 static tree
4504 copy_decl_to_var (tree decl, copy_body_data *id)
4506 tree copy, type;
4508 gcc_assert (TREE_CODE (decl) == PARM_DECL
4509 || TREE_CODE (decl) == RESULT_DECL);
4511 type = TREE_TYPE (decl);
4513 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4514 VAR_DECL, DECL_NAME (decl), type);
4515 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4516 TREE_READONLY (copy) = TREE_READONLY (decl);
4517 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4518 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4520 return copy_decl_for_dup_finish (id, decl, copy);
4523 /* Like copy_decl_to_var, but create a return slot object instead of a
4524 pointer variable for return by invisible reference. */
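/* Illustration (hypothetical aggregate-return case): for a function
   returning 'struct S' by invisible reference, the RESULT_DECL has
   type 'struct S *' with DECL_BY_REFERENCE set; the VAR_DECL built
   here has type 'struct S', i.e. the return slot itself rather than
   a pointer to it. */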
4526 static tree
4527 copy_result_decl_to_var (tree decl, copy_body_data *id)
4529 tree copy, type;
4531 gcc_assert (TREE_CODE (decl) == PARM_DECL
4532 || TREE_CODE (decl) == RESULT_DECL);
4534 type = TREE_TYPE (decl);
4535 if (DECL_BY_REFERENCE (decl))
4536 type = TREE_TYPE (type);
4538 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4539 VAR_DECL, DECL_NAME (decl), type);
4540 TREE_READONLY (copy) = TREE_READONLY (decl);
4541 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4542 if (!DECL_BY_REFERENCE (decl))
4544 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4545 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4548 return copy_decl_for_dup_finish (id, decl, copy);
4551 tree
4552 copy_decl_no_change (tree decl, copy_body_data *id)
4554 tree copy;
4556 copy = copy_node (decl);
4558 /* The COPY is not abstract; it will be generated in DST_FN. */
4559 DECL_ABSTRACT (copy) = 0;
4560 lang_hooks.dup_lang_specific_decl (copy);
4562 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4563 been taken; it's for internal bookkeeping in expand_goto_internal. */
4564 if (TREE_CODE (copy) == LABEL_DECL)
4566 TREE_ADDRESSABLE (copy) = 0;
4567 LABEL_DECL_UID (copy) = -1;
4570 return copy_decl_for_dup_finish (id, decl, copy);
4573 static tree
4574 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4576 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4577 return copy_decl_to_var (decl, id);
4578 else
4579 return copy_decl_no_change (decl, id);
4582 /* Return a copy of the function's argument tree. */
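/* Hypothetical example: versioning 'void f (int a, int b, int c)'
   with bit 1 set in ARGS_TO_SKIP produces the parameter list (a, c);
   the skipped B is instead remapped to a VAR_DECL chained onto *VARS,
   so any surviving uses of B in the body still refer to a declared
   local. */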
4583 static tree
4584 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4585 bitmap args_to_skip, tree *vars)
4587 tree arg, *parg;
4588 tree new_parm = NULL;
4589 int i = 0;
4591 parg = &new_parm;
4593 for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4594 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4596 tree new_tree = remap_decl (arg, id);
4597 lang_hooks.dup_lang_specific_decl (new_tree);
4598 *parg = new_tree;
4599 parg = &TREE_CHAIN (new_tree);
4601 else if (!pointer_map_contains (id->decl_map, arg))
4603 /* Make an equivalent VAR_DECL. If the argument was used
4604 as a temporary variable later in the function, the uses will be
4605 replaced by the local variable. */
4606 tree var = copy_decl_to_var (arg, id);
4607 get_var_ann (var);
4608 add_referenced_var (var);
4609 insert_decl_map (id, arg, var);
4610 /* Declare this new variable. */
4611 TREE_CHAIN (var) = *vars;
4612 *vars = var;
4614 return new_parm;
4617 /* Return a copy of the function's static chain. */
4618 static tree
4619 copy_static_chain (tree static_chain, copy_body_data * id)
4621 tree *chain_copy, *pvar;
4623 chain_copy = &static_chain;
4624 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4626 tree new_tree = remap_decl (*pvar, id);
4627 lang_hooks.dup_lang_specific_decl (new_tree);
4628 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4629 *pvar = new_tree;
4631 return static_chain;
4634 /* Return true if the function is allowed to be versioned.
4635 This is a guard for the versioning functionality. */
4637 bool
4638 tree_versionable_function_p (tree fndecl)
4640 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4641 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4644 /* Delete all unreachable basic blocks and update callgraph.
4645 Doing so is somewhat nontrivial because we need to update all clones and
4646 remove inlined functions that become unreachable. */
4648 static bool
4649 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4651 bool changed = false;
4652 basic_block b, next_bb;
4654 find_unreachable_blocks ();
4656 /* Delete all unreachable basic blocks. */
4658 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4660 next_bb = b->next_bb;
4662 if (!(b->flags & BB_REACHABLE))
4664 gimple_stmt_iterator bsi;
4666 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4667 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4669 struct cgraph_edge *e;
4670 struct cgraph_node *node;
4672 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4674 if (!e->inline_failed)
4675 cgraph_remove_node_and_inline_clones (e->callee);
4676 else
4677 cgraph_remove_edge (e);
4679 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4680 && id->dst_node->clones)
4681 for (node = id->dst_node->clones; node != id->dst_node;)
4683 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4685 if (!e->inline_failed)
4686 cgraph_remove_node_and_inline_clones (e->callee);
4687 else
4688 cgraph_remove_edge (e);
4691 if (node->clones)
4692 node = node->clones;
4693 else if (node->next_sibling_clone)
4694 node = node->next_sibling_clone;
4695 else
4697 while (node != id->dst_node && !node->next_sibling_clone)
4698 node = node->clone_of;
4699 if (node != id->dst_node)
4700 node = node->next_sibling_clone;
4704 delete_basic_block (b);
4705 changed = true;
4709 if (changed)
4710 tidy_fallthru_edges ();
4711 return changed;
4714 /* Update clone info after duplication. */
4716 static void
4717 update_clone_info (copy_body_data * id)
4719 struct cgraph_node *node;
4720 if (!id->dst_node->clones)
4721 return;
4722 for (node = id->dst_node->clones; node != id->dst_node;)
4724 /* First update replace maps to match the new body. */
4725 if (node->clone.tree_map)
4727 unsigned int i;
4728 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4730 struct ipa_replace_map *replace_info;
4731 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4732 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4733 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4736 if (node->clones)
4737 node = node->clones;
4738 else if (node->next_sibling_clone)
4739 node = node->next_sibling_clone;
4740 else
4742 while (node != id->dst_node && !node->next_sibling_clone)
4743 node = node->clone_of;
4744 if (node != id->dst_node)
4745 node = node->next_sibling_clone;
4750 /* Create a copy of a function's tree.
4751 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4752 of the original function and the new copied function
4753 respectively. In case we want to replace a DECL
4754 tree with another tree while duplicating the function's
4755 body, TREE_MAP represents the mapping between these
4756 trees. If UPDATE_CLONES is set, the call_stmt fields
4757 of edges of clones of the function will be updated. */
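/* Sketch of a typical use by an IPA cloning pass (assumed caller):
   build an ipa_replace_map binding some PARM_DECL to a constant,
   push it onto TREE_MAP, and call

     tree_function_versioning (old_decl, new_decl, tree_map,
                               false, NULL);

   to materialize a clone specialized for that constant argument. */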
4758 void
4759 tree_function_versioning (tree old_decl, tree new_decl,
4760 VEC(ipa_replace_map_p,gc)* tree_map,
4761 bool update_clones, bitmap args_to_skip)
4763 struct cgraph_node *old_version_node;
4764 struct cgraph_node *new_version_node;
4765 copy_body_data id;
4766 tree p;
4767 unsigned i;
4768 struct ipa_replace_map *replace_info;
4769 basic_block old_entry_block, bb;
4770 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4772 tree t_step;
4773 tree old_current_function_decl = current_function_decl;
4774 tree vars = NULL_TREE;
4776 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4777 && TREE_CODE (new_decl) == FUNCTION_DECL);
4778 DECL_POSSIBLY_INLINED (old_decl) = 1;
4780 old_version_node = cgraph_node (old_decl);
4781 new_version_node = cgraph_node (new_decl);
4783 /* Output the inlining info for this abstract function, since it has been
4784 inlined. If we don't do this now, we can lose the information about the
4785 variables in the function when the blocks get blown away as soon as we
4786 remove the cgraph node. */
4787 (*debug_hooks->outlining_inline_function) (old_decl);
4789 DECL_ARTIFICIAL (new_decl) = 1;
4790 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
4791 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
4793 /* Prepare the data structures for the tree copy. */
4794 memset (&id, 0, sizeof (id));
4796 /* Collect the statements that will need folding after the copy. */
4797 id.statements_to_fold = pointer_set_create ();
4799 id.decl_map = pointer_map_create ();
4800 id.debug_map = NULL;
4801 id.src_fn = old_decl;
4802 id.dst_fn = new_decl;
4803 id.src_node = old_version_node;
4804 id.dst_node = new_version_node;
4805 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
4806 if (id.src_node->ipa_transforms_to_apply)
4808 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
4809 unsigned int i;
4811 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
4812 id.src_node->ipa_transforms_to_apply);
4813 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
4814 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
4815 VEC_index (ipa_opt_pass,
4816 old_transforms_to_apply,
4817 i));
4820 id.copy_decl = copy_decl_no_change;
4821 id.transform_call_graph_edges
4822 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
4823 id.transform_new_cfg = true;
4824 id.transform_return_to_modify = false;
4825 id.transform_lang_insert_block = NULL;
4827 current_function_decl = new_decl;
4828 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
4829 (DECL_STRUCT_FUNCTION (old_decl));
4830 initialize_cfun (new_decl, old_decl,
4831 old_entry_block->count);
4832 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
4834 /* Copy the function's static chain. */
4835 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
4836 if (p)
4837 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
4838 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
4839 &id);
4841 /* If there's a tree_map, prepare for substitution. */
4842 if (tree_map)
4843 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
4845 gimple init;
4846 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
4847 if (replace_info->replace_p)
4849 tree op = replace_info->new_tree;
4851 STRIP_NOPS (op);
4853 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
4854 op = TREE_OPERAND (op, 0);
4856 if (TREE_CODE (op) == ADDR_EXPR)
4858 op = TREE_OPERAND (op, 0);
4859 while (handled_component_p (op))
4860 op = TREE_OPERAND (op, 0);
4861 if (TREE_CODE (op) == VAR_DECL)
4862 add_referenced_var (op);
4864 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
4865 init = setup_one_parameter (&id, replace_info->old_tree,
4866 replace_info->new_tree, id.src_fn,
4867 NULL,
4868 &vars);
4869 if (init)
4870 VEC_safe_push (gimple, heap, init_stmts, init);
4873 /* Copy the function's arguments. */
4874 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
4875 DECL_ARGUMENTS (new_decl) =
4876 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
4877 args_to_skip, &vars);
4879 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
4881 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4882 number_blocks (id.dst_fn);
4884 declare_inline_vars (DECL_INITIAL (new_decl), vars);
4886 if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
4887 /* Add local vars. */
4888 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
4889 t_step; t_step = TREE_CHAIN (t_step))
4891 tree var = TREE_VALUE (t_step);
4892 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
4893 cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
4894 else if (!can_be_nonlocal (var, &id))
4895 cfun->local_decls =
4896 tree_cons (NULL_TREE, remap_decl (var, &id),
4897 cfun->local_decls);
4900 /* Copy the function's body. */
4901 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
4902 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
4904 if (DECL_RESULT (old_decl) != NULL_TREE)
4906 tree *res_decl = &DECL_RESULT (old_decl);
4907 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
4908 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
4911 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4912 number_blocks (new_decl);
4914 /* We want to create the BB unconditionally, so that the addition of
4915 debug stmts doesn't affect BB count, which may in the end cause
4916 codegen differences. */
4917 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
4918 while (VEC_length (gimple, init_stmts))
4919 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
4920 update_clone_info (&id);
4922 /* Remap the nonlocal_goto_save_area, if any. */
4923 if (cfun->nonlocal_goto_save_area)
4925 struct walk_stmt_info wi;
4927 memset (&wi, 0, sizeof (wi));
4928 wi.info = &id;
4929 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
4932 /* Clean up. */
4933 pointer_map_destroy (id.decl_map);
4934 if (id.debug_map)
4935 pointer_map_destroy (id.debug_map);
4936 free_dominance_info (CDI_DOMINATORS);
4937 free_dominance_info (CDI_POST_DOMINATORS);
4939 fold_marked_statements (0, id.statements_to_fold);
4940 pointer_set_destroy (id.statements_to_fold);
4941 fold_cond_expr_cond ();
4942 delete_unreachable_blocks_update_callgraph (&id);
4943 update_ssa (TODO_update_ssa);
4944 free_dominance_info (CDI_DOMINATORS);
4945 free_dominance_info (CDI_POST_DOMINATORS);
4947 gcc_assert (!id.debug_stmts);
4948 VEC_free (gimple, heap, init_stmts);
4949 pop_cfun ();
4950 current_function_decl = old_current_function_decl;
4951 gcc_assert (!current_function_decl
4952 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
4953 return;
4956 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
4957 the callee and return the inlined body on success. */
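/* Hedged example: given the "const" function

     int sq (int x) { return x * x; }

   a GENERIC call 'sq (3)' can be expanded here into the RHS of the
   copied body's MODIFY_EXPR, i.e. '3 * 3'; NULL_TREE is returned when
   the call is left alone. */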
4959 tree
4960 maybe_inline_call_in_expr (tree exp)
4962 tree fn = get_callee_fndecl (exp);
4964 /* We can only try to inline "const" functions. */
4965 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
4967 struct pointer_map_t *decl_map = pointer_map_create ();
4968 call_expr_arg_iterator iter;
4969 copy_body_data id;
4970 tree param, arg, t;
4972 /* Remap the parameters. */
4973 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
4974 param;
4975 param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
4976 *pointer_map_insert (decl_map, param) = arg;
4978 memset (&id, 0, sizeof (id));
4979 id.src_fn = fn;
4980 id.dst_fn = current_function_decl;
4981 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
4982 id.decl_map = decl_map;
4984 id.copy_decl = copy_decl_no_change;
4985 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4986 id.transform_new_cfg = false;
4987 id.transform_return_to_modify = true;
4988 id.transform_lang_insert_block = false;
4990 /* Make sure not to unshare trees behind the front-end's back
4991 since front-end specific mechanisms may rely on sharing. */
4992 id.regimplify = false;
4993 id.do_not_unshare = true;
4995 /* We're not inside any EH region. */
4996 id.eh_lp_nr = 0;
4998 t = copy_tree_body (&id);
4999 pointer_map_destroy (decl_map);
5001 /* We can only return something suitable for use in a GENERIC
5002 expression tree. */
5003 if (TREE_CODE (t) == MODIFY_EXPR)
5004 return TREE_OPERAND (t, 1);
5007 return NULL_TREE;
5010 /* Duplicate a type, fields and all. */
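/* For example (illustrative), duplicating 'struct S { int i; }'
   yields a new RECORD_TYPE with freshly copied FIELD_DECLs, and the
   duplicate becomes its own TYPE_CANONICAL, distinct from the
   original type's. */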
5012 tree
5013 build_duplicate_type (tree type)
5015 struct copy_body_data id;
5017 memset (&id, 0, sizeof (id));
5018 id.src_fn = current_function_decl;
5019 id.dst_fn = current_function_decl;
5020 id.src_cfun = cfun;
5021 id.decl_map = pointer_map_create ();
5022 id.debug_map = NULL;
5023 id.copy_decl = copy_decl_no_change;
5025 type = remap_type_1 (type, &id);
5027 pointer_map_destroy (id.decl_map);
5028 if (id.debug_map)
5029 pointer_map_destroy (id.debug_map);
5031 TYPE_CANONICAL (type) = type;
5033 return type;
5036 /* Return whether it is safe to inline a function: unsafe if the callee
5037 uses different target-specific options than the caller, or if the call
5038 site's actual argument types mismatch the parameter types. E is the call edge to be checked. */
5039 bool
5040 tree_can_inline_p (struct cgraph_edge *e)
5042 #if 0
5043 /* This causes a regression in SPEC in that it prevents a cold function from
5044 inlining a hot function. Perhaps this should only apply to functions
5045 that the user declares hot/cold/optimize explicitly. */
5047 /* Don't inline a function with a higher optimization level than the
5048 caller, or with different space constraints (hot/cold functions). */
5049 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
5050 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
5052 if (caller_tree != callee_tree)
5054 struct cl_optimization *caller_opt
5055 = TREE_OPTIMIZATION ((caller_tree)
5056 ? caller_tree
5057 : optimization_default_node);
5059 struct cl_optimization *callee_opt
5060 = TREE_OPTIMIZATION ((callee_tree)
5061 ? callee_tree
5062 : optimization_default_node);
5064 if ((caller_opt->optimize > callee_opt->optimize)
5065 || (caller_opt->optimize_size != callee_opt->optimize_size))
5066 return false;
5068 #endif
5069 tree caller, callee;
5071 caller = e->caller->decl;
5072 callee = e->callee->decl;
5074 /* We cannot inline a function that uses a different EH personality
5075 than the caller. */
5076 if (DECL_FUNCTION_PERSONALITY (caller)
5077 && DECL_FUNCTION_PERSONALITY (callee)
5078 && (DECL_FUNCTION_PERSONALITY (caller)
5079 != DECL_FUNCTION_PERSONALITY (callee)))
5081 e->inline_failed = CIF_UNSPECIFIED;
5082 gimple_call_set_cannot_inline (e->call_stmt, true);
5083 return false;
5086 /* Allow the backend to decide if inlining is ok. */
5087 if (!targetm.target_option.can_inline_p (caller, callee))
5089 e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5090 gimple_call_set_cannot_inline (e->call_stmt, true);
5091 e->call_stmt_cannot_inline_p = true;
5092 return false;
5095 if (e->call_stmt
5096 && !gimple_check_call_args (e->call_stmt))
5098 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5099 gimple_call_set_cannot_inline (e->call_stmt, true);
5100 e->call_stmt_cannot_inline_p = true;
5101 return false;
5104 return true;