/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "expr.h"
30 #include "flags.h"
31 #include "params.h"
32 #include "input.h"
33 #include "insn-config.h"
34 #include "hashtab.h"
35 #include "langhooks.h"
36 #include "basic-block.h"
37 #include "tree-iterator.h"
38 #include "cgraph.h"
39 #include "intl.h"
40 #include "tree-mudflap.h"
41 #include "tree-flow.h"
42 #include "function.h"
43 #include "tree-flow.h"
44 #include "diagnostic.h"
45 #include "tree-pretty-print.h"
46 #include "except.h"
47 #include "debug.h"
48 #include "pointer-set.h"
49 #include "ipa-prop.h"
50 #include "value-prof.h"
51 #include "tree-pass.h"
52 #include "target.h"
53 #include "integrate.h"
55 /* I'm not real happy about this, but we need to handle gimple and
56 non-gimple trees. */
57 #include "gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inlined into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
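
/* For orientation, a minimal sketch (illustrative pseudo-GIMPLE, not
   exact syntax; the temporary names are invented) of what inlining
   does to a returned value.  Given a callee

       int callee (int p) { return p + 1; }

   a call "x = callee (3);" expands roughly into

       D.1 = 3;              <- PARM_DECL p remapped to a local VAR_DECL
       retval.0 = D.1 + 1;   <- the RETURN_EXPR became a MODIFY_EXPR
       x = retval.0;

   storing to the dedicated returned-value variable described above.  */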

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, this map is used for more
   than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct a new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return name;
    }

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
     that in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by the variable during
     inlining: this saves us from needing to introduce a PHI node in the
     case the return value is just partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining a function with an uninitialized variable, we might
             extend its lifetime (the variable might get reused).  This
             causes an ICE if we end up extending the lifetime of an SSA
             name across an abnormal edge, and it also increases register
             pressure.

             We simply initialize all uninitialized vars to 0, except when
             we are inlining to the very first BB.  We can avoid this for
             all BBs that are not inside strongly connected regions of the
             CFG, but this is expensive to test.  */
          if (id->entry_bb
              && is_gimple_reg (SSA_NAME_VAR (name))
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;

              init_stmt = gimple_build_assign (new_tree,
                                               fold_convert (TREE_TYPE (new_tree),
                                                             integer_zero_node));
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
                  == name)
                set_default_def (SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
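
/* A sketch of the RESULT_DECL special case above (names invented for
   illustration).  If the callee's body is

       if (cond) return x; else return y;

   then mapping the RESULT_DECL's SSA names to a plain variable lets the
   inliner emit "retvar = x;" and "retvar = y;" on the two paths instead
   of having to merge two SSA names with a new PHI node at the join.  */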

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      if (cfun && gimple_in_ssa_p (cfun)
          && (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
        {
          get_var_ann (t);
          add_referenced_var (t);
        }
      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}
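
/* An example (hedged; variable names invented for illustration) of why
   types need remapping at all: for a variably modified type such as

       void callee (int n) { int a[n]; ... }

   the array type's size refers to the PARM_DECL `n'.  When the body is
   duplicated, `n' is remapped to a new decl, so the type `int[n]' must
   be remapped as well or its size would still reference the old decl.
   Types that are not variably modified are shared, see remap_type.  */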

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

/* Return the previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}

static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var)
              && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
            cfun->local_decls = tree_cons (NULL_TREE, old_var,
                                           cfun->local_decls);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;

          /* Also copy value-expressions.  */
          if (TREE_CODE (new_var) == VAR_DECL
              && DECL_HAS_VALUE_EXPR_P (new_var))
            {
              tree tem = DECL_VALUE_EXPR (new_var);
              bool old_regimplify = id->regimplify;
              id->remapping_type_depth++;
              walk_tree (&tem, copy_tree_body_r, id, NULL);
              id->remapping_type_depth--;
              id->regimplify = old_regimplify;
              SET_DECL_VALUE_EXPR (new_var, tem);
            }
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */

static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}

/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}

/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */
      if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree type, new_tree, old;

              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the
                 INDIRECT_REF, but we absolutely rely on that.  As
                 fold_indirect_ref does other useful transformations,
                 try that first, though.  */
              type = TREE_TYPE (TREE_TYPE (*n));
              new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (!*tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ???  We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars.  If not referenced from types only.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
        gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the INDIRECT_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
          tree block = id->block;
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
          id->block = block;

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
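
/* A sketch of the *& simplification above (names invented for
   illustration).  If the caller passes "&x" for a pointer parameter
   "p", then after substituting the argument, a use "*p" in the callee
   body becomes "*&x", which the INDIRECT_REF handling folds back to
   plain "x".  */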

/* Called from copy_body via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              if (id->do_not_unshare)
                new_tree = *n;
              else
                new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ???  We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars.  If not referenced from types or debug stmts only.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n || id->remapping_type_depth != 0);
              if (n)
                new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (NULL, new_nr);
}
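
/* For example (region numbers invented for illustration): if the callee
   contained __builtin_eh_pointer (2) and its EH region 2 was duplicated
   as region 5 in the caller, the GIMPLE_CALL handling below rewrites the
   copied call to __builtin_eh_pointer (5).  */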

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval && TREE_CODE (retval) != RESULT_DECL)
        {
          copy = gimple_build_assign (id->retvar, retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      if (gimple_debug_bind_p (stmt))
        {
          copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
                                          gimple_debug_bind_get_value (stmt),
                                          stmt);
          VEC_safe_push (gimple, heap, id->debug_stmts, copy);
          return copy;
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
         RESX and EH_DISPATCH.  */
      if (id->eh_map)
        switch (gimple_code (copy))
          {
          case GIMPLE_CALL:
            {
              tree r, fndecl = gimple_call_fndecl (copy);
              if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
                switch (DECL_FUNCTION_CODE (fndecl))
                  {
                  case BUILT_IN_EH_COPY_VALUES:
                    r = gimple_call_arg (copy, 1);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 1, r);
                    /* FALLTHRU */

                  case BUILT_IN_EH_POINTER:
                  case BUILT_IN_EH_FILTER:
                    r = gimple_call_arg (copy, 0);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 0, r);
                    break;

                  default:
                    break;
                  }

              /* Reset alias info if we didn't apply measures to
                 keep it valid over inlining by setting DECL_PT_UID.  */
              if (!id->src_cfun->gimple_df
                  || !id->src_cfun->gimple_df->ipa_pta)
                gimple_call_reset_alias_info (copy);
            }
            break;

          case GIMPLE_RESX:
            {
              int r = gimple_resx_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_resx_set_region (copy, r);
            }
            break;

          case GIMPLE_EH_DISPATCH:
            {
              int r = gimple_eh_dispatch_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_eh_dispatch_set_region (copy, r);
            }
            break;

          default:
            break;
          }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  if (gimple_debug_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}

/* Copy a basic block, scaling profile accordingly.  Edges will be taken
   care of later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     have just small importance to drive canonicalize_loop_headers.  */
  freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
        continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
         non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == NOP_EXPR
          && !is_gimple_val (gimple_assign_rhs1 (stmt)))
        {
          tree new_rhs;
          new_rhs = force_gimple_operand_gsi (&seq_gsi,
                                              gimple_assign_rhs1 (stmt),
                                              true, NULL, false, GSI_NEW_STMT);
          gimple_assign_set_rhs1 (stmt, new_rhs);
          id->regimplify = false;
        }

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
        gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block has been empty at the start of this iteration,
         call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
        copy_gsi = gsi_start_bb (copy_basic_block);
      else
        gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
         possibly turned the statement into multiple statements, we
         need to process all of them.  */
      do
        {
          tree fn;

          stmt = gsi_stmt (copy_gsi);
          if (is_gimple_call (stmt)
              && gimple_call_va_arg_pack_p (stmt)
              && id->gimple_call)
            {
              /* __builtin_va_arg_pack () should be replaced by
                 all arguments corresponding to ... in the caller.  */
              tree p;
              gimple new_call;
              VEC(tree, heap) *argarray;
              size_t nargs = gimple_call_num_args (id->gimple_call);
              size_t n;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                nargs--;

              /* Create the new array of arguments.  */
              n = nargs + gimple_call_num_args (stmt);
              argarray = VEC_alloc (tree, heap, n);
              VEC_safe_grow (tree, heap, argarray, n);

              /* Copy all the arguments before '...'  */
              memcpy (VEC_address (tree, argarray),
                      gimple_call_arg_ptr (stmt, 0),
                      gimple_call_num_args (stmt) * sizeof (tree));

              /* Append the arguments passed in '...'  */
              memcpy (VEC_address (tree, argarray) + gimple_call_num_args (stmt),
                      gimple_call_arg_ptr (id->gimple_call, 0)
                      + (gimple_call_num_args (id->gimple_call) - nargs),
                      nargs * sizeof (tree));

              new_call = gimple_build_call_vec (gimple_call_fn (stmt),
                                                argarray);

              VEC_free (tree, heap, argarray);

              /* Copy all GIMPLE_CALL flags, location and block, except
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, stmt);
              gimple_call_set_va_arg_pack (new_call, false);
              gimple_set_location (new_call, gimple_location (stmt));
              gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

              gsi_replace (&copy_gsi, new_call, false);
              gimple_set_bb (stmt, NULL);
              stmt = new_call;
            }
          else if (is_gimple_call (stmt)
                   && id->gimple_call
                   && (decl = gimple_call_fndecl (stmt))
                   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
            {
              /* __builtin_va_arg_pack_len () should be replaced by
                 the number of anonymous arguments.  */
              size_t nargs = gimple_call_num_args (id->gimple_call);
              tree count, p;
              gimple new_stmt;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                nargs--;

              count = build_int_cst (integer_type_node, nargs);
              new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
              gsi_replace (&copy_gsi, new_stmt, false);
              stmt = new_stmt;
            }
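
          /* A concrete sketch of the two replacements above (caller and
             callee invented for illustration): if "int f (int x, ...)"
             contains "bar (x, __builtin_va_arg_pack ())" and is inlined
             into the call "f (1, 2, 3)", the two anonymous arguments are
             spliced in, yielding "bar (1, 2, 3)", while a use of
             __builtin_va_arg_pack_len () would have been replaced by 2.  */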

          /* Statements produced by inlining can be unfolded, especially
             when we constant propagated some operands.  We can't fold
             them right now for two reasons:
             1) folding requires SSA_NAME_DEF_STMTs to be correct
             2) we can't change function calls to builtins.
             So we just mark the statement for later folding.  We mark
             all new statements, instead of just the statements that have
             changed by some nontrivial substitution, so that even
             statements made foldable indirectly are updated.  If this
             turns out to be expensive, copy_body can be told to watch
             for nontrivial changes.  */
          if (id->statements_to_fold)
            pointer_set_insert (id->statements_to_fold, stmt);

          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (is_gimple_call (stmt))
            {
              struct cgraph_edge *edge;
              int flags;

              switch (id->transform_call_graph_edges)
                {
                case CB_CGE_DUPLICATE:
                  edge = cgraph_edge (id->src_node, orig_stmt);
                  if (edge)
                    {
                      int edge_freq = edge->frequency;
                      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
                                                gimple_uid (stmt),
                                                REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
                                                edge->frequency, true);
                      /* We could also just rescale the frequency, but
                         doing so would introduce roundoff errors and make
                         the verifier unhappy.  */
                      edge->frequency
                        = compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                          copy_basic_block);
                      if (dump_file
                          && profile_status_for_function (cfun) != PROFILE_ABSENT
                          && (edge_freq > edge->frequency + 10
                              || edge_freq < edge->frequency - 10))
                        {
                          fprintf (dump_file, "Edge frequency estimated by "
                                   "cgraph %i diverge from inliner's estimate %i\n",
                                   edge_freq,
                                   edge->frequency);
                          fprintf (dump_file,
                                   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
                                   bb->index,
                                   bb->frequency,
                                   copy_basic_block->frequency);
                        }
                      stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
                    }
                  break;

                case CB_CGE_MOVE_CLONES:
                  cgraph_set_call_stmt_including_clones (id->dst_node,
                                                         orig_stmt, stmt);
                  edge = cgraph_edge (id->dst_node, stmt);
                  break;

                case CB_CGE_MOVE:
                  edge = cgraph_edge (id->dst_node, orig_stmt);
                  if (edge)
                    cgraph_set_call_stmt (edge, stmt);
                  break;

                default:
                  gcc_unreachable ();
                }

              /* Constant propagation on arguments done during inlining
                 may create new direct calls.  Produce an edge for them.  */
              if ((!edge
                   || (edge->indirect_inlining_edge
                       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
                  && (fn = gimple_call_fndecl (stmt)) != NULL)
                {
                  struct cgraph_node *dest = cgraph_node (fn);

                  /* We have a missing edge in the callgraph.  This can happen
                     when previous inlining turned an indirect call into a
                     direct call by constant propagating arguments or we are
                     producing a dead clone (for further cloning).  In all
                     other cases we hit a bug (incorrect node sharing is the
                     most common reason for missing edges).  */
                  gcc_assert (dest->needed || !dest->analyzed
                              || dest->address_taken
                              || !id->src_node->analyzed);
                  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
                    cgraph_create_edge_including_clones
                      (id->dst_node, dest, orig_stmt, stmt, bb->count,
                       compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                       copy_basic_block),
                       bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
                  else
                    cgraph_create_edge (id->dst_node, dest, stmt,
                                        bb->count,
                                        compute_call_stmt_bb_frequency
                                          (id->dst_node->decl, copy_basic_block),
                                        bb->loop_depth)->inline_failed
                      = CIF_ORIGINALLY_INDIRECT_CALL;
                  if (dump_file)
                    {
                      fprintf (dump_file, "Created new direct edge to %s",
                               cgraph_node_name (dest));
                    }
                }

              flags = gimple_call_flags (stmt);
              if (flags & ECF_MAY_BE_ALLOCA)
                cfun->calls_alloca = true;
              if (flags & ECF_RETURNS_TWICE)
                cfun->calls_setjmp = true;
            }

          maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
                                      id->eh_map, id->eh_lp_nr);

          if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
            {
              ssa_op_iter i;
              tree def;

              find_new_referenced_vars (gsi_stmt (copy_gsi));
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                if (TREE_CODE (def) == SSA_NAME)
                  SSA_NAME_DEF_STMT (def) = stmt;
            }

          gsi_next (&copy_gsi);
        }
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}

/* Inserting a Single Entry Multiple Exit region in SSA form into code in
   SSA form is quite easy, since the dominator relationship for the old
   basic blocks does not change.

   There is, however, an exception: inlining might change the dominator
   relation across EH edges from basic blocks within the inlined function
   to landing pads in the function we inline into.

   The function fills in the PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges
   and we might want to change the way we build the CFG pre-inlining
   to include all the possible edges then.  */
static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
                                  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
        || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
        gimple phi;
        gimple_stmt_iterator si;

        if (!nonlocal_goto)
          gcc_assert (e->flags & EDGE_EH);

        if (!can_throw)
          gcc_assert (!(e->flags & EDGE_EH));

        for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
          {
            edge re;

            phi = gsi_stmt (si);

            /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
            gcc_assert (!e->dest->aux);

            gcc_assert ((e->flags & EDGE_EH)
                        || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

            if (!is_gimple_reg (PHI_RESULT (phi)))
              {
                mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
                continue;
              }

            re = find_edge (ret_bb, e->dest);
            gcc_assert (re);
            gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
                        == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

            SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
                     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
          }
      }
}
1835 /* Copy edges from BB into its copy constructed earlier, scale profile
1836 accordingly. Edges will be taken care of later. Assume aux
1837 pointers to point to the copies of each BB. */
1839 static void
1840 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1842 basic_block new_bb = (basic_block) bb->aux;
1843 edge_iterator ei;
1844 edge old_edge;
1845 gimple_stmt_iterator si;
1846 int flags;
1848 /* Use the indices from the original blocks to create edges for the
1849 new ones. */
1850 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1851 if (!(old_edge->flags & EDGE_EH))
1853 edge new_edge;
1855 flags = old_edge->flags;
1857 /* Return edges do get a FALLTHRU flag when the get inlined. */
1858 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1859 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1860 flags |= EDGE_FALLTHRU;
1861 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1862 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1863 new_edge->probability = old_edge->probability;
1866 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1867 return;
1869 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1871 gimple copy_stmt;
1872 bool can_throw, nonlocal_goto;
1874 copy_stmt = gsi_stmt (si);
1875 if (!is_gimple_debug (copy_stmt))
1877 update_stmt (copy_stmt);
1878 if (gimple_in_ssa_p (cfun))
1879 mark_symbols_for_renaming (copy_stmt);
1882 /* Do this before the possible split_block. */
1883 gsi_next (&si);
1885 /* If this tree could throw an exception, there are two
1886 cases where we need to add abnormal edge(s): the
1887 tree wasn't in a region and there is a "current
1888 region" in the caller; or the original tree had
1889 EH edges. In both cases split the block after the tree,
1890 and add abnormal edge(s) as needed; we need both
1891 those from the callee and the caller.
1892 We check whether the copy can throw, because the const
1893 propagation can change an INDIRECT_REF which throws
1894 into a COMPONENT_REF which doesn't. If the copy
1895 can throw, the original could also throw. */
1896 can_throw = stmt_can_throw_internal (copy_stmt);
1897 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1899 if (can_throw || nonlocal_goto)
1901 if (!gsi_end_p (si))
1902 /* Note that bb's predecessor edges aren't necessarily
1903 right at this point; split_block doesn't care. */
1905 edge e = split_block (new_bb, copy_stmt);
1907 new_bb = e->dest;
1908 new_bb->aux = e->src->aux;
1909 si = gsi_start_bb (new_bb);
1913 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1914 make_eh_dispatch_edges (copy_stmt);
1915 else if (can_throw)
1916 make_eh_edges (copy_stmt);
1918 if (nonlocal_goto)
1919 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1921 if ((can_throw || nonlocal_goto)
1922 && gimple_in_ssa_p (cfun))
1923 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1924 can_throw, nonlocal_goto);
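 /* Profile-scaling illustration for the edge counts above (made-up
 numbers): with REG_BR_PROB_BASE == 10000 and COUNT_SCALE == 5000,
 i.e. the call site runs half as often as the callee's entry, an
 edge executed 400 times in the callee gets count
 400 * 5000 / 10000 == 200 in the copy. */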
1928 /* Copy the PHIs. All blocks and edges are copied, some blocks
 1929 were possibly split and new outgoing EH edges inserted.
 1930 BB points to the block of the original function and AUX pointers link
1931 the original and newly copied blocks. */
1933 static void
1934 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1936 basic_block const new_bb = (basic_block) bb->aux;
1937 edge_iterator ei;
1938 gimple phi;
1939 gimple_stmt_iterator si;
1941 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1943 tree res, new_res;
1944 gimple new_phi;
1945 edge new_edge;
1947 phi = gsi_stmt (si);
1948 res = PHI_RESULT (phi);
1949 new_res = res;
1950 if (is_gimple_reg (res))
1952 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1953 SSA_NAME_DEF_STMT (new_res)
1954 = new_phi = create_phi_node (new_res, new_bb);
1955 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1957 edge const old_edge
1958 = find_edge ((basic_block) new_edge->src->aux, bb);
1959 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1960 tree new_arg = arg;
1961 tree block = id->block;
1962 id->block = NULL_TREE;
1963 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1964 id->block = block;
1965 gcc_assert (new_arg);
1966 /* With return slot optimization we can end up with
1967 non-gimple (foo *)&this->m, fix that here. */
1968 if (TREE_CODE (new_arg) != SSA_NAME
1969 && TREE_CODE (new_arg) != FUNCTION_DECL
1970 && !is_gimple_val (new_arg))
1972 gimple_seq stmts = NULL;
1973 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1974 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
1976 add_phi_arg (new_phi, new_arg, new_edge,
1977 gimple_phi_arg_location_from_edge (phi, old_edge));
1984 /* Wrapper for remap_decl so it can be used as a callback. */
1986 static tree
1987 remap_decl_1 (tree decl, void *data)
1989 return remap_decl (decl, (copy_body_data *) data);
1992 /* Build struct function and associated datastructures for the new clone
 1993 NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1995 static void
1996 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
1998 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1999 gcov_type count_scale;
2001 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2002 count_scale = (REG_BR_PROB_BASE * count
2003 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2004 else
2005 count_scale = REG_BR_PROB_BASE;
2007 /* Register specific tree functions. */
2008 gimple_register_cfg_hooks ();
2010 /* Get clean struct function. */
2011 push_struct_function (new_fndecl);
2013 /* We will rebuild these, so just sanity check that they are empty. */
2014 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2015 gcc_assert (cfun->local_decls == NULL);
2016 gcc_assert (cfun->cfg == NULL);
2017 gcc_assert (cfun->decl == new_fndecl);
 2019 /* Copy items we preserve during cloning. */
2020 cfun->static_chain_decl = src_cfun->static_chain_decl;
2021 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2022 cfun->function_end_locus = src_cfun->function_end_locus;
2023 cfun->curr_properties = src_cfun->curr_properties;
2024 cfun->last_verified = src_cfun->last_verified;
2025 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2026 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2027 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2028 cfun->stdarg = src_cfun->stdarg;
2029 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
2030 cfun->after_inlining = src_cfun->after_inlining;
2031 cfun->can_throw_non_call_exceptions
2032 = src_cfun->can_throw_non_call_exceptions;
2033 cfun->returns_struct = src_cfun->returns_struct;
2034 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2035 cfun->after_tree_profile = src_cfun->after_tree_profile;
2037 init_empty_tree_cfg ();
2039 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2040 ENTRY_BLOCK_PTR->count =
2041 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2042 REG_BR_PROB_BASE);
2043 ENTRY_BLOCK_PTR->frequency
2044 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2045 EXIT_BLOCK_PTR->count =
2046 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2047 REG_BR_PROB_BASE);
2048 EXIT_BLOCK_PTR->frequency =
2049 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2050 if (src_cfun->eh)
2051 init_eh_for_function ();
2053 if (src_cfun->gimple_df)
2055 init_tree_ssa (cfun);
2056 cfun->gimple_df->in_ssa_p = true;
2057 init_ssa_operands ();
2059 pop_cfun ();
2062 /* Make a copy of the body of FN so that it can be inserted inline in
2063 another function. Walks FN via CFG, returns new fndecl. */
2065 static tree
2066 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2067 basic_block entry_block_map, basic_block exit_block_map)
2069 tree callee_fndecl = id->src_fn;
2070 /* Original cfun for the callee, doesn't change. */
2071 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2072 struct function *cfun_to_copy;
2073 basic_block bb;
2074 tree new_fndecl = NULL;
2075 gcov_type count_scale;
2076 int last;
2078 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2079 count_scale = (REG_BR_PROB_BASE * count
2080 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2081 else
2082 count_scale = REG_BR_PROB_BASE;
2084 /* Register specific tree functions. */
2085 gimple_register_cfg_hooks ();
2087 /* Must have a CFG here at this point. */
2088 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2089 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2091 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2093 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2094 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2095 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2096 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2098 /* Duplicate any exception-handling regions. */
2099 if (cfun->eh)
2100 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2101 remap_decl_1, id);
 2103 /* Use aux pointers to map the original blocks to their copies. */
2104 FOR_EACH_BB_FN (bb, cfun_to_copy)
2106 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2107 bb->aux = new_bb;
2108 new_bb->aux = bb;
2111 last = last_basic_block;
2113 /* Now that we've duplicated the blocks, duplicate their edges. */
2114 FOR_ALL_BB_FN (bb, cfun_to_copy)
2115 copy_edges_for_bb (bb, count_scale, exit_block_map);
2117 if (gimple_in_ssa_p (cfun))
2118 FOR_ALL_BB_FN (bb, cfun_to_copy)
2119 copy_phis_for_bb (bb, id);
2121 FOR_ALL_BB_FN (bb, cfun_to_copy)
2123 ((basic_block)bb->aux)->aux = NULL;
2124 bb->aux = NULL;
 2127 /* Zero out the AUX fields of blocks newly created during EH edge
2128 insertion. */
2129 for (; last < last_basic_block; last++)
2130 BASIC_BLOCK (last)->aux = NULL;
2131 entry_block_map->aux = NULL;
2132 exit_block_map->aux = NULL;
2134 if (id->eh_map)
2136 pointer_map_destroy (id->eh_map);
2137 id->eh_map = NULL;
2140 return new_fndecl;
2143 /* Copy the debug STMT using ID. We deal with these statements in a
2144 special way: if any variable in their VALUE expression wasn't
2145 remapped yet, we won't remap it, because that would get decl uids
2146 out of sync, causing codegen differences between -g and -g0. If
2147 this arises, we drop the VALUE expression altogether. */
2149 static void
2150 copy_debug_stmt (gimple stmt, copy_body_data *id)
2152 tree t, *n;
2153 struct walk_stmt_info wi;
2155 t = id->block;
2156 if (gimple_block (stmt))
2158 tree *n;
2159 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2160 if (n)
2161 t = *n;
2163 gimple_set_block (stmt, t);
2165 /* Remap all the operands in COPY. */
2166 memset (&wi, 0, sizeof (wi));
2167 wi.info = id;
2169 processing_debug_stmt = 1;
2171 t = gimple_debug_bind_get_var (stmt);
2173 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2174 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2176 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2177 t = *n;
2179 else if (TREE_CODE (t) == VAR_DECL
2180 && !TREE_STATIC (t)
2181 && gimple_in_ssa_p (cfun)
2182 && !pointer_map_contains (id->decl_map, t)
2183 && !var_ann (t))
2184 /* T is a non-localized variable. */;
2185 else
2186 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2188 gimple_debug_bind_set_var (stmt, t);
2190 if (gimple_debug_bind_has_value_p (stmt))
2191 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2192 remap_gimple_op_r, &wi, NULL);
2194 /* Punt if any decl couldn't be remapped. */
2195 if (processing_debug_stmt < 0)
2196 gimple_debug_bind_reset_value (stmt);
2198 processing_debug_stmt = 0;
2200 update_stmt (stmt);
2201 if (gimple_in_ssa_p (cfun))
2202 mark_symbols_for_renaming (stmt);
2205 /* Process deferred debug stmts. In order to give values better odds
2206 of being successfully remapped, we delay the processing of debug
2207 stmts until all other stmts that might require remapping are
2208 processed. */
2210 static void
2211 copy_debug_stmts (copy_body_data *id)
2213 size_t i;
2214 gimple stmt;
2216 if (!id->debug_stmts)
2217 return;
2219 for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
2220 copy_debug_stmt (stmt, id);
2222 VEC_free (gimple, heap, id->debug_stmts);
2225 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2226 another function. */
2228 static tree
2229 copy_tree_body (copy_body_data *id)
2231 tree fndecl = id->src_fn;
2232 tree body = DECL_SAVED_TREE (fndecl);
2234 walk_tree (&body, copy_tree_body_r, id, NULL);
2236 return body;
2239 /* Make a copy of the body of FN so that it can be inserted inline in
2240 another function. */
2242 static tree
2243 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2244 basic_block entry_block_map, basic_block exit_block_map)
2246 tree fndecl = id->src_fn;
2247 tree body;
2249 /* If this body has a CFG, walk CFG and copy. */
2250 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2251 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map);
2252 copy_debug_stmts (id);
2254 return body;
2257 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2258 defined in function FN, or of a data member thereof. */
2260 static bool
2261 self_inlining_addr_expr (tree value, tree fn)
2263 tree var;
2265 if (TREE_CODE (value) != ADDR_EXPR)
2266 return false;
2268 var = get_base_address (TREE_OPERAND (value, 0));
2270 return var && auto_var_in_fn_p (var, fn);
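 /* For instance (an illustrative example, not from the sources):

 void f (int *p)
 {
 int local = 0;
 if (p == 0)
 f (&local);
 ...
 }

 When the recursive call is inlined into f itself, the argument
 &local is an ADDR_EXPR of f's own automatic variable; propagating
 it as if it were invariant would conflate the caller's and the
 inlined copy's instances of LOCAL. */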
2273 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2274 lexical block and line number information from base_stmt, if given,
2275 or from the last stmt of the block otherwise. */
2277 static gimple
2278 insert_init_debug_bind (copy_body_data *id,
2279 basic_block bb, tree var, tree value,
2280 gimple base_stmt)
2282 gimple note;
2283 gimple_stmt_iterator gsi;
2284 tree tracked_var;
2286 if (!gimple_in_ssa_p (id->src_cfun))
2287 return NULL;
2289 if (!MAY_HAVE_DEBUG_STMTS)
2290 return NULL;
2292 tracked_var = target_for_debug_bind (var);
2293 if (!tracked_var)
2294 return NULL;
2296 if (bb)
2298 gsi = gsi_last_bb (bb);
2299 if (!base_stmt && !gsi_end_p (gsi))
2300 base_stmt = gsi_stmt (gsi);
2303 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2305 if (bb)
2307 if (!gsi_end_p (gsi))
2308 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2309 else
2310 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2313 return note;
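 /* For example (illustrative GIMPLE dump): inlining "use (arg)" at -g
 may append

 # DEBUG arg => value

 to BB, so the debugger can still display ARG even when no real
 initialization statement was needed. */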
2316 static void
2317 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2319 /* If VAR represents a zero-sized variable, it's possible that the
2320 assignment statement may result in no gimple statements. */
2321 if (init_stmt)
2323 gimple_stmt_iterator si = gsi_last_bb (bb);
2325 /* We can end up with init statements that store to a non-register
2326 from a rhs with a conversion. Handle that here by forcing the
2327 rhs into a temporary. gimple_regimplify_operands is not
2328 prepared to do this for us. */
2329 if (!is_gimple_debug (init_stmt)
2330 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2331 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2332 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2334 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2335 gimple_expr_type (init_stmt),
2336 gimple_assign_rhs1 (init_stmt));
2337 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2338 GSI_NEW_STMT);
2339 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2340 gimple_assign_set_rhs1 (init_stmt, rhs);
2342 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2343 gimple_regimplify_operands (init_stmt, &si);
2344 mark_symbols_for_renaming (init_stmt);
2346 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2348 tree var, def = gimple_assign_lhs (init_stmt);
2350 if (TREE_CODE (def) == SSA_NAME)
2351 var = SSA_NAME_VAR (def);
2352 else
2353 var = def;
2355 insert_init_debug_bind (id, bb, var, def, init_stmt);
 2360 /* Initialize parameter P with VALUE. If needed, produce an init statement
 2361 at the end of BB. When BB is NULL, we return the init statement to be
 2362 output later. */
2363 static gimple
2364 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2365 basic_block bb, tree *vars)
2367 gimple init_stmt = NULL;
2368 tree var;
2369 tree rhs = value;
2370 tree def = (gimple_in_ssa_p (cfun)
2371 ? gimple_default_def (id->src_cfun, p) : NULL);
2373 if (value
2374 && value != error_mark_node
2375 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2377 if (fold_convertible_p (TREE_TYPE (p), value))
2378 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2379 else
2380 /* ??? For valid (GIMPLE) programs we should not end up here.
2381 Still if something has gone wrong and we end up with truly
2382 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2383 to not leak invalid GIMPLE to the following passes. */
2384 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2387 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2388 here since the type of this decl must be visible to the calling
2389 function. */
2390 var = copy_decl_to_var (p, id);
2392 /* We're actually using the newly-created var. */
2393 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2395 get_var_ann (var);
2396 add_referenced_var (var);
2399 /* Declare this new variable. */
2400 TREE_CHAIN (var) = *vars;
2401 *vars = var;
2403 /* Make gimplifier happy about this variable. */
2404 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
 2406 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2407 we would not need to create a new variable here at all, if it
2408 weren't for debug info. Still, we can just use the argument
2409 value. */
2410 if (TREE_READONLY (p)
2411 && !TREE_ADDRESSABLE (p)
2412 && value && !TREE_SIDE_EFFECTS (value)
2413 && !def)
 2415 /* We may produce non-gimple trees by adding NOPs or introduce
 2416 invalid sharing when the operand is not really constant.
 2417 It is not a big deal to prohibit constant propagation here, as
 2418 we will constant propagate in the DOM1 pass anyway. */
2419 if (is_gimple_min_invariant (value)
2420 && useless_type_conversion_p (TREE_TYPE (p),
2421 TREE_TYPE (value))
2422 /* We have to be very careful about ADDR_EXPR. Make sure
2423 the base variable isn't a local variable of the inlined
2424 function, e.g., when doing recursive inlining, direct or
2425 mutually-recursive or whatever, which is why we don't
2426 just test whether fn == current_function_decl. */
2427 && ! self_inlining_addr_expr (value, fn))
2429 insert_decl_map (id, p, value);
2430 insert_debug_decl_map (id, p, var);
2431 return insert_init_debug_bind (id, bb, var, value, NULL);
2435 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2436 that way, when the PARM_DECL is encountered, it will be
2437 automatically replaced by the VAR_DECL. */
2438 insert_decl_map (id, p, var);
2440 /* Even if P was TREE_READONLY, the new VAR should not be.
2441 In the original code, we would have constructed a
2442 temporary, and then the function body would have never
2443 changed the value of P. However, now, we will be
2444 constructing VAR directly. The constructor body may
2445 change its value multiple times as it is being
2446 constructed. Therefore, it must not be TREE_READONLY;
 2447 the back-end assumes that a TREE_READONLY variable is
2448 assigned to only once. */
2449 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2450 TREE_READONLY (var) = 0;
 2452 /* If there is no setup required and we are in SSA, take the easy route
 2453 replacing all SSA names representing the function parameter by the
 2454 SSA name passed to the function.
 2456 We need to construct a map for the variable anyway, as it might be
 2457 used in different SSA names when the parameter is set in the function.
 2459 Do the replacement at -O0 for const arguments replaced by a constant.
 2460 This is important for builtin_constant_p and other constructs requiring
 2461 a constant argument to be visible in the inlined function body. */
2462 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2463 && (optimize
2464 || (TREE_READONLY (p)
2465 && is_gimple_min_invariant (rhs)))
2466 && (TREE_CODE (rhs) == SSA_NAME
2467 || is_gimple_min_invariant (rhs))
2468 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2470 insert_decl_map (id, def, rhs);
2471 return insert_init_debug_bind (id, bb, var, rhs, NULL);
 2474 /* If the value of the argument is never used, don't bother
 2475 initializing it. */
2476 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2478 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2479 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2482 /* Initialize this VAR_DECL from the equivalent argument. Convert
2483 the argument to the proper type in case it was promoted. */
2484 if (value)
2486 if (rhs == error_mark_node)
2488 insert_decl_map (id, p, var);
2489 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2492 STRIP_USELESS_TYPE_CONVERSION (rhs);
2494 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2495 keep our trees in gimple form. */
2496 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2498 def = remap_ssa_name (def, id);
2499 init_stmt = gimple_build_assign (def, rhs);
2500 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2501 set_default_def (var, NULL);
2503 else
2504 init_stmt = gimple_build_assign (var, rhs);
2506 if (bb && init_stmt)
2507 insert_init_stmt (id, bb, init_stmt);
2509 return init_stmt;
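 /* Example of the constant-replacement paths above (illustrative):

 static inline int scale (const int k, int x) { return k * x; }
 ... y = scale (4, y) ...

 K is TREE_READONLY, never written in the body, and the argument 4
 is a gimple min-invariant, so K is simply mapped to 4 and no init
 statement is emitted; this also keeps __builtin_constant_p (k)
 true in the inlined body even at -O0. */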
2512 /* Generate code to initialize the parameters of the function at the
2513 top of the stack in ID from the GIMPLE_CALL STMT. */
2515 static void
2516 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2517 tree fn, basic_block bb)
2519 tree parms;
2520 size_t i;
2521 tree p;
2522 tree vars = NULL_TREE;
2523 tree static_chain = gimple_call_chain (stmt);
2525 /* Figure out what the parameters are. */
2526 parms = DECL_ARGUMENTS (fn);
2528 /* Loop through the parameter declarations, replacing each with an
2529 equivalent VAR_DECL, appropriately initialized. */
2530 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2532 tree val;
2533 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2534 setup_one_parameter (id, p, val, fn, bb, &vars);
2537 /* Initialize the static chain. */
2538 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2539 gcc_assert (fn != current_function_decl);
2540 if (p)
2542 /* No static chain? Seems like a bug in tree-nested.c. */
2543 gcc_assert (static_chain);
2545 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2548 declare_inline_vars (id->block, vars);
2552 /* Declare a return variable to replace the RESULT_DECL for the
2553 function we are calling. An appropriate DECL_STMT is returned.
2554 The USE_STMT is filled to contain a use of the declaration to
2555 indicate the return value of the function.
 2557 RETURN_SLOT, if non-null, is the place where to store the result. It
2558 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2559 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2561 The return value is a (possibly null) value that holds the result
2562 as seen by the caller. */
2564 static tree
2565 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest)
2567 tree callee = id->src_fn;
2568 tree caller = id->dst_fn;
2569 tree result = DECL_RESULT (callee);
2570 tree callee_type = TREE_TYPE (result);
2571 tree caller_type;
2572 tree var, use;
2574 /* Handle type-mismatches in the function declaration return type
2575 vs. the call expression. */
2576 if (modify_dest)
2577 caller_type = TREE_TYPE (modify_dest);
2578 else
2579 caller_type = TREE_TYPE (TREE_TYPE (callee));
2581 /* We don't need to do anything for functions that don't return
2582 anything. */
2583 if (!result || VOID_TYPE_P (callee_type))
2584 return NULL_TREE;
2586 /* If there was a return slot, then the return value is the
2587 dereferenced address of that object. */
2588 if (return_slot)
2590 /* The front end shouldn't have used both return_slot and
2591 a modify expression. */
2592 gcc_assert (!modify_dest);
2593 if (DECL_BY_REFERENCE (result))
2595 tree return_slot_addr = build_fold_addr_expr (return_slot);
2596 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2598 /* We are going to construct *&return_slot and we can't do that
2599 for variables believed to be not addressable.
 2601 FIXME: This check can possibly match, because values returned
 2602 via the return slot optimization are not believed to have their
 2603 address taken by alias analysis. */
2604 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2605 if (gimple_in_ssa_p (cfun))
2607 HOST_WIDE_INT bitsize;
2608 HOST_WIDE_INT bitpos;
2609 tree offset;
2610 enum machine_mode mode;
2611 int unsignedp;
2612 int volatilep;
2613 tree base;
2614 base = get_inner_reference (return_slot, &bitsize, &bitpos,
2615 &offset,
2616 &mode, &unsignedp, &volatilep,
2617 false);
2618 if (TREE_CODE (base) == INDIRECT_REF)
2619 base = TREE_OPERAND (base, 0);
2620 if (TREE_CODE (base) == SSA_NAME)
2621 base = SSA_NAME_VAR (base);
2622 mark_sym_for_renaming (base);
2624 var = return_slot_addr;
2626 else
2628 var = return_slot;
2629 gcc_assert (TREE_CODE (var) != SSA_NAME);
2630 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2632 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2633 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2634 && !DECL_GIMPLE_REG_P (result)
2635 && DECL_P (var))
2636 DECL_GIMPLE_REG_P (var) = 0;
2637 use = NULL;
2638 goto done;
2641 /* All types requiring non-trivial constructors should have been handled. */
2642 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2644 /* Attempt to avoid creating a new temporary variable. */
2645 if (modify_dest
2646 && TREE_CODE (modify_dest) != SSA_NAME)
2648 bool use_it = false;
2650 /* We can't use MODIFY_DEST if there's type promotion involved. */
2651 if (!useless_type_conversion_p (callee_type, caller_type))
2652 use_it = false;
2654 /* ??? If we're assigning to a variable sized type, then we must
2655 reuse the destination variable, because we've no good way to
2656 create variable sized temporaries at this point. */
2657 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2658 use_it = true;
2660 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2661 reuse it as the result of the call directly. Don't do this if
2662 it would promote MODIFY_DEST to addressable. */
2663 else if (TREE_ADDRESSABLE (result))
2664 use_it = false;
2665 else
2667 tree base_m = get_base_address (modify_dest);
2669 /* If the base isn't a decl, then it's a pointer, and we don't
2670 know where that's going to go. */
2671 if (!DECL_P (base_m))
2672 use_it = false;
2673 else if (is_global_var (base_m))
2674 use_it = false;
2675 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2676 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2677 && !DECL_GIMPLE_REG_P (result)
2678 && DECL_GIMPLE_REG_P (base_m))
2679 use_it = false;
2680 else if (!TREE_ADDRESSABLE (base_m))
2681 use_it = true;
2684 if (use_it)
2686 var = modify_dest;
2687 use = NULL;
2688 goto done;
2692 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2694 var = copy_result_decl_to_var (result, id);
2695 if (gimple_in_ssa_p (cfun))
2697 get_var_ann (var);
2698 add_referenced_var (var);
2701 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2702 DECL_STRUCT_FUNCTION (caller)->local_decls
2703 = tree_cons (NULL_TREE, var,
2704 DECL_STRUCT_FUNCTION (caller)->local_decls);
2706 /* Do not have the rest of GCC warn about this variable as it should
2707 not be visible to the user. */
2708 TREE_NO_WARNING (var) = 1;
2710 declare_inline_vars (id->block, var);
2712 /* Build the use expr. If the return type of the function was
2713 promoted, convert it back to the expected type. */
2714 use = var;
2715 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2716 use = fold_convert (caller_type, var);
2718 STRIP_USELESS_TYPE_CONVERSION (use);
2720 if (DECL_BY_REFERENCE (result))
2722 TREE_ADDRESSABLE (var) = 1;
2723 var = build_fold_addr_expr (var);
2726 done:
2727 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2728 way, when the RESULT_DECL is encountered, it will be
2729 automatically replaced by the VAR_DECL. */
2730 insert_decl_map (id, result, var);
2732 /* Remember this so we can ignore it in remap_decls. */
2733 id->retvar = var;
2735 return use;
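 /* Usage sketch (hypothetical caller code):

 struct S s = make_s (); // call with the return slot optimization
 t = add1 (x); // plain call with an LHS

 For the first call, S arrives here as RETURN_SLOT and the inlined
 body stores into it directly; for the second, T arrives as
 MODIFY_DEST and is reused as the return variable unless type
 promotion or addressability concerns force a temporary. */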
2738 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2739 to a local label. */
2741 static tree
2742 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2744 tree node = *nodep;
2745 tree fn = (tree) fnp;
2747 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2748 return node;
2750 if (TYPE_P (node))
2751 *walk_subtrees = 0;
2753 return NULL_TREE;
2756 /* Determine if the function can be copied. If so return NULL. If
 2757 not, return a string describing the reason for failure. */
2759 static const char *
2760 copy_forbidden (struct function *fun, tree fndecl)
2762 const char *reason = fun->cannot_be_copied_reason;
2763 tree step;
2765 /* Only examine the function once. */
2766 if (fun->cannot_be_copied_set)
2767 return reason;
2769 /* We cannot copy a function that receives a non-local goto
2770 because we cannot remap the destination label used in the
2771 function that is performing the non-local goto. */
2772 /* ??? Actually, this should be possible, if we work at it.
2773 No doubt there's just a handful of places that simply
2774 assume it doesn't happen and don't substitute properly. */
2775 if (fun->has_nonlocal_label)
2777 reason = G_("function %q+F can never be copied "
2778 "because it receives a non-local goto");
2779 goto fail;
2782 for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2784 tree decl = TREE_VALUE (step);
2786 if (TREE_CODE (decl) == VAR_DECL
2787 && TREE_STATIC (decl)
2788 && !DECL_EXTERNAL (decl)
2789 && DECL_INITIAL (decl)
2790 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2791 has_label_address_in_static_1,
2792 fndecl))
2794 reason = G_("function %q+F can never be copied because it saves "
2795 "address of local label in a static variable");
2796 goto fail;
2800 fail:
2801 fun->cannot_be_copied_reason = reason;
2802 fun->cannot_be_copied_set = true;
2803 return reason;
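 /* Example of the second rejection above (illustrative use of the GNU
 &&label extension):

 void f (void)
 {
 static void *resume = &&restart; // DECL_INITIAL mentions a
 restart: // label local to f
 ...
 }

 A copy of f could not remap RESUME, which would keep pointing at
 the label in the original body. */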
2807 static const char *inline_forbidden_reason;
2809 /* A callback for walk_gimple_seq to handle statements. Returns non-null
 2810 iff a function cannot be inlined. Also sets the reason why. */
2812 static tree
2813 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2814 struct walk_stmt_info *wip)
2816 tree fn = (tree) wip->info;
2817 tree t;
2818 gimple stmt = gsi_stmt (*gsi);
2820 switch (gimple_code (stmt))
2822 case GIMPLE_CALL:
 2823 /* Refuse to inline alloca calls unless the user explicitly forced it,
 2824 as this may change the program's memory overhead drastically when
 2825 the function using alloca is called in a loop. For the GCC
 2826 included in SPEC2000, inlining into schedule_block caused it to
 2827 require 2GB of RAM instead of 256MB. */
2828 if (gimple_alloca_call_p (stmt)
2829 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2831 inline_forbidden_reason
2832 = G_("function %q+F can never be inlined because it uses "
2833 "alloca (override using the always_inline attribute)");
2834 *handled_ops_p = true;
2835 return fn;
2838 t = gimple_call_fndecl (stmt);
2839 if (t == NULL_TREE)
2840 break;
2842 /* We cannot inline functions that call setjmp. */
2843 if (setjmp_call_p (t))
2845 inline_forbidden_reason
2846 = G_("function %q+F can never be inlined because it uses setjmp");
2847 *handled_ops_p = true;
2848 return t;
2851 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2852 switch (DECL_FUNCTION_CODE (t))
2854 /* We cannot inline functions that take a variable number of
2855 arguments. */
2856 case BUILT_IN_VA_START:
2857 case BUILT_IN_NEXT_ARG:
2858 case BUILT_IN_VA_END:
2859 inline_forbidden_reason
2860 = G_("function %q+F can never be inlined because it "
2861 "uses variable argument lists");
2862 *handled_ops_p = true;
2863 return t;
2865 case BUILT_IN_LONGJMP:
2866 /* We can't inline functions that call __builtin_longjmp at
2867 all. The non-local goto machinery really requires the
2868 destination be in a different function. If we allow the
2869 function calling __builtin_longjmp to be inlined into the
2870 function calling __builtin_setjmp, Things will Go Awry. */
2871 inline_forbidden_reason
2872 = G_("function %q+F can never be inlined because "
2873 "it uses setjmp-longjmp exception handling");
2874 *handled_ops_p = true;
2875 return t;
2877 case BUILT_IN_NONLOCAL_GOTO:
2878 /* Similarly. */
2879 inline_forbidden_reason
2880 = G_("function %q+F can never be inlined because "
2881 "it uses non-local goto");
2882 *handled_ops_p = true;
2883 return t;
2885 case BUILT_IN_RETURN:
2886 case BUILT_IN_APPLY_ARGS:
2887 /* If a __builtin_apply_args caller would be inlined,
2888 it would be saving arguments of the function it has
2889 been inlined into. Similarly __builtin_return would
 2890 return from the function it has been inlined into. */
2891 inline_forbidden_reason
2892 = G_("function %q+F can never be inlined because "
2893 "it uses __builtin_return or __builtin_apply_args");
2894 *handled_ops_p = true;
2895 return t;
2897 default:
2898 break;
2900 break;
2902 case GIMPLE_GOTO:
2903 t = gimple_goto_dest (stmt);
2905 /* We will not inline a function which uses computed goto. The
2906 addresses of its local labels, which may be tucked into
2907 global storage, are of course not constant across
2908 instantiations, which causes unexpected behavior. */
2909 if (TREE_CODE (t) != LABEL_DECL)
2911 inline_forbidden_reason
2912 = G_("function %q+F can never be inlined "
2913 "because it contains a computed goto");
2914 *handled_ops_p = true;
2915 return t;
2917 break;
2919 default:
2920 break;
2923 *handled_ops_p = false;
2924 return NULL_TREE;
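 /* For example (illustrative; declarations elided), each of these
 bodies is rejected by the walk above:

 char *f (size_t n) { return alloca (n); } // alloca call
 int g (void) { return setjmp (env); } // calls setjmp
 void h (void **tab) { goto *tab[0]; } // computed goto

 Only the alloca rejection can be overridden, via always_inline. */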
2927 /* Return true if FNDECL is a function that cannot be inlined into
2928 another one. */
2930 static bool
2931 inline_forbidden_p (tree fndecl)
2933 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
2934 struct walk_stmt_info wi;
2935 struct pointer_set_t *visited_nodes;
2936 basic_block bb;
2937 bool forbidden_p = false;
2939 /* First check for shared reasons not to copy the code. */
2940 inline_forbidden_reason = copy_forbidden (fun, fndecl);
2941 if (inline_forbidden_reason != NULL)
2942 return true;
2944 /* Next, walk the statements of the function looking for
 2945 constructs we can't handle, or that are non-optimal for inlining. */
2946 visited_nodes = pointer_set_create ();
2947 memset (&wi, 0, sizeof (wi));
2948 wi.info = (void *) fndecl;
2949 wi.pset = visited_nodes;
2951 FOR_EACH_BB_FN (bb, fun)
2953 gimple ret;
2954 gimple_seq seq = bb_seq (bb);
2955 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
2956 forbidden_p = (ret != NULL);
2957 if (forbidden_p)
2958 break;
2961 pointer_set_destroy (visited_nodes);
2962 return forbidden_p;
2965 /* Return true if CALLEE cannot be inlined into CALLER. */
2967 static bool
2968 inline_forbidden_into_p (tree caller, tree callee)
2970 /* Don't inline if the functions have different EH personalities. */
2971 if (DECL_FUNCTION_PERSONALITY (caller)
2972 && DECL_FUNCTION_PERSONALITY (callee)
2973 && (DECL_FUNCTION_PERSONALITY (caller)
2974 != DECL_FUNCTION_PERSONALITY (callee)))
2975 return true;
2977 /* Don't inline if the callee can throw non-call exceptions but the
2978 caller cannot. */
2979 if (DECL_STRUCT_FUNCTION (callee)
2980 && DECL_STRUCT_FUNCTION (callee)->can_throw_non_call_exceptions
2981 && !(DECL_STRUCT_FUNCTION (caller)
2982 && DECL_STRUCT_FUNCTION (caller)->can_throw_non_call_exceptions))
2983 return true;
2985 return false;
2988 /* Returns nonzero if FN is a function that does not have any
2989 fundamental inline blocking properties. */
2991 bool
2992 tree_inlinable_function_p (tree fn)
2994 bool inlinable = true;
2995 bool do_warning;
2996 tree always_inline;
2998 /* If we've already decided this function shouldn't be inlined,
2999 there's no need to check again. */
3000 if (DECL_UNINLINABLE (fn))
3001 return false;
3003 /* We only warn for functions declared `inline' by the user. */
3004 do_warning = (warn_inline
3005 && DECL_DECLARED_INLINE_P (fn)
3006 && !DECL_NO_INLINE_WARNING_P (fn)
3007 && !DECL_IN_SYSTEM_HEADER (fn));
3009 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3011 if (flag_no_inline
3012 && always_inline == NULL)
3014 if (do_warning)
3015 warning (OPT_Winline, "function %q+F can never be inlined because it "
3016 "is suppressed using -fno-inline", fn);
3017 inlinable = false;
3020 /* Don't auto-inline anything that might not be bound within
3021 this unit of translation. */
3022 else if (!DECL_DECLARED_INLINE_P (fn)
3023 && DECL_REPLACEABLE_P (fn))
3024 inlinable = false;
3026 else if (!function_attribute_inlinable_p (fn))
3028 if (do_warning)
3029 warning (OPT_Winline, "function %q+F can never be inlined because it "
3030 "uses attributes conflicting with inlining", fn);
3031 inlinable = false;
3034 else if (inline_forbidden_p (fn))
3036 /* See if we should warn about uninlinable functions. Previously,
3037 some of these warnings would be issued while trying to expand
3038 the function inline, but that would cause multiple warnings
3039 about functions that would for example call alloca. But since
 3040 this is a property of the function, just one warning is enough.
3041 As a bonus we can now give more details about the reason why a
3042 function is not inlinable. */
3043 if (always_inline)
3044 sorry (inline_forbidden_reason, fn);
3045 else if (do_warning)
3046 warning (OPT_Winline, inline_forbidden_reason, fn);
3048 inlinable = false;
3051 /* Squirrel away the result so that we don't have to check again. */
3052 DECL_UNINLINABLE (fn) = !inlinable;
3054 return inlinable;
 3057 /* Estimate the cost of a memory move. Use the machine-dependent
 3058 word size and take a possible memcpy call into account. */
 3060 int
 3061 estimate_move_cost (tree type)
3063 HOST_WIDE_INT size;
3065 gcc_assert (!VOID_TYPE_P (type));
3067 size = int_size_in_bytes (type);
3069 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3070 /* Cost of a memcpy call, 3 arguments and the call. */
3071 return 4;
3072 else
3073 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
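 /* E.g. with MOVE_MAX_PIECES == 4 and MOVE_RATIO == 8 (made-up numbers
 for illustration): a 10-byte move costs (10 + 3) / 4 == 3, while a
 64-byte move exceeds the 4 * 8 == 32 byte threshold and is charged
 the flat memcpy cost of 4 (three arguments plus the call). */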
3076 /* Returns cost of operation CODE, according to WEIGHTS */
3078 static int
3079 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3080 tree op1 ATTRIBUTE_UNUSED, tree op2)
3082 switch (code)
3084 /* These are "free" conversions, or their presumed cost
3085 is folded into other operations. */
3086 case RANGE_EXPR:
3087 CASE_CONVERT:
3088 case COMPLEX_EXPR:
3089 case PAREN_EXPR:
3090 return 0;
 3092 /* Assign a cost of 1 to usual operations.
3093 ??? We may consider mapping RTL costs to this. */
3094 case COND_EXPR:
3095 case VEC_COND_EXPR:
3097 case PLUS_EXPR:
3098 case POINTER_PLUS_EXPR:
3099 case MINUS_EXPR:
3100 case MULT_EXPR:
3102 case ADDR_SPACE_CONVERT_EXPR:
3103 case FIXED_CONVERT_EXPR:
3104 case FIX_TRUNC_EXPR:
3106 case NEGATE_EXPR:
3107 case FLOAT_EXPR:
3108 case MIN_EXPR:
3109 case MAX_EXPR:
3110 case ABS_EXPR:
3112 case LSHIFT_EXPR:
3113 case RSHIFT_EXPR:
3114 case LROTATE_EXPR:
3115 case RROTATE_EXPR:
3116 case VEC_LSHIFT_EXPR:
3117 case VEC_RSHIFT_EXPR:
3119 case BIT_IOR_EXPR:
3120 case BIT_XOR_EXPR:
3121 case BIT_AND_EXPR:
3122 case BIT_NOT_EXPR:
3124 case TRUTH_ANDIF_EXPR:
3125 case TRUTH_ORIF_EXPR:
3126 case TRUTH_AND_EXPR:
3127 case TRUTH_OR_EXPR:
3128 case TRUTH_XOR_EXPR:
3129 case TRUTH_NOT_EXPR:
3131 case LT_EXPR:
3132 case LE_EXPR:
3133 case GT_EXPR:
3134 case GE_EXPR:
3135 case EQ_EXPR:
3136 case NE_EXPR:
3137 case ORDERED_EXPR:
3138 case UNORDERED_EXPR:
3140 case UNLT_EXPR:
3141 case UNLE_EXPR:
3142 case UNGT_EXPR:
3143 case UNGE_EXPR:
3144 case UNEQ_EXPR:
3145 case LTGT_EXPR:
3147 case CONJ_EXPR:
3149 case PREDECREMENT_EXPR:
3150 case PREINCREMENT_EXPR:
3151 case POSTDECREMENT_EXPR:
3152 case POSTINCREMENT_EXPR:
3154 case REALIGN_LOAD_EXPR:
3156 case REDUC_MAX_EXPR:
3157 case REDUC_MIN_EXPR:
3158 case REDUC_PLUS_EXPR:
3159 case WIDEN_SUM_EXPR:
3160 case WIDEN_MULT_EXPR:
3161 case DOT_PROD_EXPR:
3163 case VEC_WIDEN_MULT_HI_EXPR:
3164 case VEC_WIDEN_MULT_LO_EXPR:
3165 case VEC_UNPACK_HI_EXPR:
3166 case VEC_UNPACK_LO_EXPR:
3167 case VEC_UNPACK_FLOAT_HI_EXPR:
3168 case VEC_UNPACK_FLOAT_LO_EXPR:
3169 case VEC_PACK_TRUNC_EXPR:
3170 case VEC_PACK_SAT_EXPR:
3171 case VEC_PACK_FIX_TRUNC_EXPR:
3172 case VEC_EXTRACT_EVEN_EXPR:
3173 case VEC_EXTRACT_ODD_EXPR:
3174 case VEC_INTERLEAVE_HIGH_EXPR:
3175 case VEC_INTERLEAVE_LOW_EXPR:
3177 return 1;
 3179 /* A few special cases of expensive operations. This is useful
 3180 to avoid inlining functions having too many of these. */
3181 case TRUNC_DIV_EXPR:
3182 case CEIL_DIV_EXPR:
3183 case FLOOR_DIV_EXPR:
3184 case ROUND_DIV_EXPR:
3185 case EXACT_DIV_EXPR:
3186 case TRUNC_MOD_EXPR:
3187 case CEIL_MOD_EXPR:
3188 case FLOOR_MOD_EXPR:
3189 case ROUND_MOD_EXPR:
3190 case RDIV_EXPR:
3191 if (TREE_CODE (op2) != INTEGER_CST)
3192 return weights->div_mod_cost;
3193 return 1;
3195 default:
3196 /* We expect a copy assignment with no operator. */
3197 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3198 return 0;
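 /* Thus (illustrative) "a / 4" is charged 1 like ordinary arithmetic,
 since division by an INTEGER_CST usually expands to cheap shifts and
 multiplies, whereas "a / b" is charged WEIGHTS->div_mod_cost. */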
3203 /* Estimate number of instructions that will be created by expanding
3204 the statements in the statement sequence STMTS.
3205 WEIGHTS contains weights attributed to various constructs. */
3207 static
3208 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3210 int cost;
3211 gimple_stmt_iterator gsi;
3213 cost = 0;
3214 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3215 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3217 return cost;
3221 /* Estimate number of instructions that will be created by expanding STMT.
3222 WEIGHTS contains weights attributed to various constructs. */
 3224 int
 3225 estimate_num_insns (gimple stmt, eni_weights *weights)
3227 unsigned cost, i;
3228 enum gimple_code code = gimple_code (stmt);
3229 tree lhs;
3230 tree rhs;
3232 switch (code)
3234 case GIMPLE_ASSIGN:
 3235 /* Try to estimate the cost of assignments. We have two cases to
 3236 deal with:
 3237 1) Simple assignments to registers;
 3238 2) Stores to things that must live in memory. This includes
 3239 "normal" stores to scalars, but also assignments of large
 3240 structures, or constructors of big arrays;
 3242 Let us look at these two cases, assuming we have "a = b + C":
3243 <GIMPLE_ASSIGN <var_decl "a">
3244 <plus_expr <var_decl "b"> <constant C>>
3245 If "a" is a GIMPLE register, the assignment to it is free on almost
3246 any target, because "a" usually ends up in a real register. Hence
3247 the only cost of this expression comes from the PLUS_EXPR, and we
3248 can ignore the GIMPLE_ASSIGN.
3249 If "a" is not a GIMPLE register, the assignment to "a" will most
3250 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3251 of moving something into "a", which we compute using the function
3252 estimate_move_cost. */
3253 lhs = gimple_assign_lhs (stmt);
3254 rhs = gimple_assign_rhs1 (stmt);
3256 if (is_gimple_reg (lhs))
3257 cost = 0;
3258 else
3259 cost = estimate_move_cost (TREE_TYPE (lhs));
3261 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3262 cost += estimate_move_cost (TREE_TYPE (rhs));
3264 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3265 gimple_assign_rhs1 (stmt),
3266 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3267 == GIMPLE_BINARY_RHS
3268 ? gimple_assign_rhs2 (stmt) : NULL);
3269 break;
3271 case GIMPLE_COND:
3272 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3273 gimple_op (stmt, 0),
3274 gimple_op (stmt, 1));
3275 break;
3277 case GIMPLE_SWITCH:
3278 /* Take into account cost of the switch + guess 2 conditional jumps for
3279 each case label.
3281 TODO: once the switch expansion logic is sufficiently separated, we can
 3282 do a better job of estimating the cost of the switch. */
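 /* E.g. (illustrative) a switch with 16 case labels costs
 floor_log2 (16) * 2 == 8 when estimating time, modeling a balanced
 decision tree, but 16 * 2 == 32 when estimating size. */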
3283 if (weights->time_based)
3284 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3285 else
3286 cost = gimple_switch_num_labels (stmt) * 2;
3287 break;
3289 case GIMPLE_CALL:
3291 tree decl = gimple_call_fndecl (stmt);
3292 tree addr = gimple_call_fn (stmt);
3293 tree funtype = TREE_TYPE (addr);
3295 if (POINTER_TYPE_P (funtype))
3296 funtype = TREE_TYPE (funtype);
3298 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3299 cost = weights->target_builtin_call_cost;
3300 else
3301 cost = weights->call_cost;
3303 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3304 switch (DECL_FUNCTION_CODE (decl))
3306 /* Builtins that expand to constants. */
3307 case BUILT_IN_CONSTANT_P:
3308 case BUILT_IN_EXPECT:
3309 case BUILT_IN_OBJECT_SIZE:
3310 case BUILT_IN_UNREACHABLE:
3311 /* Simple register moves or loads from stack. */
3312 case BUILT_IN_RETURN_ADDRESS:
3313 case BUILT_IN_EXTRACT_RETURN_ADDR:
3314 case BUILT_IN_FROB_RETURN_ADDR:
3315 case BUILT_IN_RETURN:
3316 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
3317 case BUILT_IN_FRAME_ADDRESS:
3318 case BUILT_IN_VA_END:
3319 case BUILT_IN_STACK_SAVE:
3320 case BUILT_IN_STACK_RESTORE:
3321 /* Exception state returns or moves registers around. */
3322 case BUILT_IN_EH_FILTER:
3323 case BUILT_IN_EH_POINTER:
3324 case BUILT_IN_EH_COPY_VALUES:
3325 return 0;
 3327 /* Builtins that are not expensive (that is, they are most probably
 3328 expanded inline into reasonably simple code). */
3329 case BUILT_IN_ABS:
3330 case BUILT_IN_ALLOCA:
3331 case BUILT_IN_BSWAP32:
3332 case BUILT_IN_BSWAP64:
3333 case BUILT_IN_CLZ:
3334 case BUILT_IN_CLZIMAX:
3335 case BUILT_IN_CLZL:
3336 case BUILT_IN_CLZLL:
3337 case BUILT_IN_CTZ:
3338 case BUILT_IN_CTZIMAX:
3339 case BUILT_IN_CTZL:
3340 case BUILT_IN_CTZLL:
3341 case BUILT_IN_FFS:
3342 case BUILT_IN_FFSIMAX:
3343 case BUILT_IN_FFSL:
3344 case BUILT_IN_FFSLL:
3345 case BUILT_IN_IMAXABS:
3346 case BUILT_IN_FINITE:
3347 case BUILT_IN_FINITEF:
3348 case BUILT_IN_FINITEL:
3349 case BUILT_IN_FINITED32:
3350 case BUILT_IN_FINITED64:
3351 case BUILT_IN_FINITED128:
3352 case BUILT_IN_FPCLASSIFY:
3353 case BUILT_IN_ISFINITE:
3354 case BUILT_IN_ISINF_SIGN:
3355 case BUILT_IN_ISINF:
3356 case BUILT_IN_ISINFF:
3357 case BUILT_IN_ISINFL:
3358 case BUILT_IN_ISINFD32:
3359 case BUILT_IN_ISINFD64:
3360 case BUILT_IN_ISINFD128:
3361 case BUILT_IN_ISNAN:
3362 case BUILT_IN_ISNANF:
3363 case BUILT_IN_ISNANL:
3364 case BUILT_IN_ISNAND32:
3365 case BUILT_IN_ISNAND64:
3366 case BUILT_IN_ISNAND128:
3367 case BUILT_IN_ISNORMAL:
3368 case BUILT_IN_ISGREATER:
3369 case BUILT_IN_ISGREATEREQUAL:
3370 case BUILT_IN_ISLESS:
3371 case BUILT_IN_ISLESSEQUAL:
3372 case BUILT_IN_ISLESSGREATER:
3373 case BUILT_IN_ISUNORDERED:
3374 case BUILT_IN_VA_ARG_PACK:
3375 case BUILT_IN_VA_ARG_PACK_LEN:
3376 case BUILT_IN_VA_COPY:
3377 case BUILT_IN_TRAP:
3378 case BUILT_IN_SAVEREGS:
3379 case BUILT_IN_POPCOUNTL:
3380 case BUILT_IN_POPCOUNTLL:
3381 case BUILT_IN_POPCOUNTIMAX:
3382 case BUILT_IN_POPCOUNT:
3383 case BUILT_IN_PARITYL:
3384 case BUILT_IN_PARITYLL:
3385 case BUILT_IN_PARITYIMAX:
3386 case BUILT_IN_PARITY:
3387 case BUILT_IN_LABS:
3388 case BUILT_IN_LLABS:
3389 case BUILT_IN_PREFETCH:
3390 cost = weights->target_builtin_call_cost;
3391 break;
3393 default:
3394 break;
3397 if (decl)
3398 funtype = TREE_TYPE (decl);
3400 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3401 cost += estimate_move_cost (TREE_TYPE (funtype));
3402 /* Our cost must be kept in sync with
 3403 cgraph_estimate_size_after_inlining, which uses the function
 3404 declaration to figure out the arguments. */
3405 if (decl && DECL_ARGUMENTS (decl))
3407 tree arg;
3408 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3409 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3410 cost += estimate_move_cost (TREE_TYPE (arg));
3412 else if (funtype && prototype_p (funtype))
3414 tree t;
3415 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3416 t = TREE_CHAIN (t))
3417 if (!VOID_TYPE_P (TREE_VALUE (t)))
3418 cost += estimate_move_cost (TREE_VALUE (t));
3420 else
3422 for (i = 0; i < gimple_call_num_args (stmt); i++)
3424 tree arg = gimple_call_arg (stmt, i);
3425 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3426 cost += estimate_move_cost (TREE_TYPE (arg));
3430 break;
3433 case GIMPLE_GOTO:
3434 case GIMPLE_LABEL:
3435 case GIMPLE_NOP:
3436 case GIMPLE_PHI:
3437 case GIMPLE_RETURN:
3438 case GIMPLE_PREDICT:
3439 case GIMPLE_DEBUG:
3440 return 0;
3442 case GIMPLE_ASM:
3443 return asm_str_count (gimple_asm_string (stmt));
3445 case GIMPLE_RESX:
3446 /* This is either going to be an external function call with one
3447 argument, or two register copy statements plus a goto. */
3448 return 2;
3450 case GIMPLE_EH_DISPATCH:
3451 /* ??? This is going to turn into a switch statement. Ideally
3452 we'd have a look at the eh region and estimate the number of
3453 edges involved. */
3454 return 10;
3456 case GIMPLE_BIND:
3457 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3459 case GIMPLE_EH_FILTER:
3460 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3462 case GIMPLE_CATCH:
3463 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3465 case GIMPLE_TRY:
3466 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3467 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3469 /* OpenMP directives are generally very expensive. */
3471 case GIMPLE_OMP_RETURN:
3472 case GIMPLE_OMP_SECTIONS_SWITCH:
3473 case GIMPLE_OMP_ATOMIC_STORE:
3474 case GIMPLE_OMP_CONTINUE:
3475 /* ...except these, which are cheap. */
3476 return 0;
3478 case GIMPLE_OMP_ATOMIC_LOAD:
3479 return weights->omp_cost;
3481 case GIMPLE_OMP_FOR:
3482 return (weights->omp_cost
3483 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3484 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3486 case GIMPLE_OMP_PARALLEL:
3487 case GIMPLE_OMP_TASK:
3488 case GIMPLE_OMP_CRITICAL:
3489 case GIMPLE_OMP_MASTER:
3490 case GIMPLE_OMP_ORDERED:
3491 case GIMPLE_OMP_SECTION:
3492 case GIMPLE_OMP_SECTIONS:
3493 case GIMPLE_OMP_SINGLE:
3494 return (weights->omp_cost
3495 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3497 default:
3498 gcc_unreachable ();
3501 return cost;
3504 /* Estimate number of instructions that will be created by expanding
3505 function FNDECL. WEIGHTS contains weights attributed to various
3506 constructs. */
 3508 int
 3509 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3511 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3512 gimple_stmt_iterator bsi;
3513 basic_block bb;
3514 int n = 0;
3516 gcc_assert (my_function && my_function->cfg);
3517 FOR_EACH_BB_FN (bb, my_function)
3519 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3520 n += estimate_num_insns (gsi_stmt (bsi), weights);
3523 return n;
3527 /* Initializes weights used by estimate_num_insns. */
3529 void
3530 init_inline_once (void)
3532 eni_size_weights.call_cost = 1;
3533 eni_size_weights.target_builtin_call_cost = 1;
3534 eni_size_weights.div_mod_cost = 1;
3535 eni_size_weights.omp_cost = 40;
3536 eni_size_weights.time_based = false;
3538 /* Estimating time for call is difficult, since we have no idea what the
3539 called function does. In the current uses of eni_time_weights,
3540 underestimating the cost does less harm than overestimating it, so
3541 we choose a rather small value here. */
3542 eni_time_weights.call_cost = 10;
3543 eni_time_weights.target_builtin_call_cost = 10;
3544 eni_time_weights.div_mod_cost = 10;
3545 eni_time_weights.omp_cost = 40;
3546 eni_time_weights.time_based = true;
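 /* A minimal usage sketch (assuming FNDECL is a function with a CFG):

 int size = estimate_num_insns_fn (fndecl, &eni_size_weights);
 int time = estimate_num_insns_fn (fndecl, &eni_time_weights);

 The same statement walk is priced twice, once with size-oriented
 and once with time-oriented weights. */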
3549 /* Estimate the number of instructions in a gimple_seq. */
 3551 int
 3552 count_insns_seq (gimple_seq seq, eni_weights *weights)
3554 gimple_stmt_iterator gsi;
3555 int n = 0;
3556 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3557 n += estimate_num_insns (gsi_stmt (gsi), weights);
3559 return n;
3563 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3565 static void
3566 prepend_lexical_block (tree current_block, tree new_block)
3568 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3569 BLOCK_SUBBLOCKS (current_block) = new_block;
3570 BLOCK_SUPERCONTEXT (new_block) = current_block;
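 /* E.g. if CURRENT_BLOCK already has subblocks B1 -> B2, prepending N
 yields N -> B1 -> B2, with N's BLOCK_SUPERCONTEXT pointing back at
 CURRENT_BLOCK. */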
3573 /* Fetch callee declaration from the call graph edge going from NODE and
 3574 associated with the STMT call statement. Return NULL_TREE if not found. */
3575 static tree
3576 get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3578 struct cgraph_edge *cs;
3580 cs = cgraph_edge (node, stmt);
3581 if (cs && !cs->indirect_unknown_callee)
3582 return cs->callee->decl;
3584 return NULL_TREE;
3587 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3589 static bool
3590 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3592 tree use_retvar;
3593 tree fn;
3594 struct pointer_map_t *st, *dst;
3595 tree return_slot;
3596 tree modify_dest;
3597 location_t saved_location;
3598 struct cgraph_edge *cg_edge;
3599 cgraph_inline_failed_t reason;
3600 basic_block return_block;
3601 edge e;
3602 gimple_stmt_iterator gsi, stmt_gsi;
3603 bool successfully_inlined = FALSE;
3604 bool purge_dead_abnormal_edges;
3605 tree t_step;
3606 tree var;
3608 /* Set input_location here so we get the right instantiation context
3609 if we call instantiate_decl from inlinable_function_p. */
3610 saved_location = input_location;
3611 if (gimple_has_location (stmt))
3612 input_location = gimple_location (stmt);
3614 /* From here on, we're only interested in CALL_EXPRs. */
3615 if (gimple_code (stmt) != GIMPLE_CALL)
3616 goto egress;
3618 /* First, see if we can figure out what function is being called.
3619 If we cannot, then there is no hope of inlining the function. */
3620 fn = gimple_call_fndecl (stmt);
3621 if (!fn)
3623 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3624 if (!fn)
3625 goto egress;
3628 /* Turn forward declarations into real ones. */
3629 fn = cgraph_node (fn)->decl;
3631 /* If FN is a declaration of a function in a nested scope that was
3632 globally declared inline, we don't set its DECL_INITIAL.
3633 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3634 C++ front-end uses it for cdtors to refer to their internal
3635 declarations, that are not real functions. Fortunately those
3636 don't have trees to be saved, so we can tell by checking their
3637 gimple_body. */
3638 if (!DECL_INITIAL (fn)
3639 && DECL_ABSTRACT_ORIGIN (fn)
3640 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3641 fn = DECL_ABSTRACT_ORIGIN (fn);
 3643 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
3644 Kill this check once this is fixed. */
3645 if (!id->dst_node->analyzed)
3646 goto egress;
3648 cg_edge = cgraph_edge (id->dst_node, stmt);
3650 /* First check that inlining isn't simply forbidden in this case. */
3651 if (inline_forbidden_into_p (cg_edge->caller->decl, cg_edge->callee->decl))
3652 goto egress;
3654 /* Don't try to inline functions that are not well-suited to inlining. */
3655 if (!cgraph_inline_p (cg_edge, &reason))
3657 /* If this call was originally indirect, we do not want to emit any
3658 inlining related warnings or sorry messages because there are no
3659 guarantees regarding those. */
3660 if (cg_edge->indirect_inlining_edge)
3661 goto egress;
3663 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3664 /* Avoid warnings during early inline pass. */
3665 && cgraph_global_info_ready)
3667 sorry ("inlining failed in call to %q+F: %s", fn,
3668 cgraph_inline_failed_string (reason));
3669 sorry ("called from here");
3671 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3672 && !DECL_IN_SYSTEM_HEADER (fn)
3673 && reason != CIF_UNSPECIFIED
3674 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3675 /* Avoid warnings during early inline pass. */
3676 && cgraph_global_info_ready)
3678 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3679 fn, cgraph_inline_failed_string (reason));
3680 warning (OPT_Winline, "called from here");
3682 goto egress;
3684 fn = cg_edge->callee->decl;
3686 #ifdef ENABLE_CHECKING
3687 if (cg_edge->callee->decl != id->dst_node->decl)
3688 verify_cgraph_node (cg_edge->callee);
3689 #endif
3691 /* We will be inlining this callee. */
3692 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
 3694 /* Update the caller's EH personality. */
3695 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3696 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3697 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3699 /* Split the block holding the GIMPLE_CALL. */
3700 e = split_block (bb, stmt);
3701 bb = e->src;
3702 return_block = e->dest;
3703 remove_edge (e);
3705 /* split_block splits after the statement; work around this by
3706 moving the call into the second block manually. Not pretty,
3707 but seems easier than doing the CFG manipulation by hand
 3708 when the GIMPLE_CALL is the last statement of BB. */
3709 stmt_gsi = gsi_last_bb (bb);
3710 gsi_remove (&stmt_gsi, false);
 3712 /* If the GIMPLE_CALL was the last statement of BB, it may have
3713 been the source of abnormal edges. In this case, schedule
3714 the removal of dead abnormal edges. */
3715 gsi = gsi_start_bb (return_block);
3716 if (gsi_end_p (gsi))
3718 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3719 purge_dead_abnormal_edges = true;
3721 else
3723 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3724 purge_dead_abnormal_edges = false;
3727 stmt_gsi = gsi_start_bb (return_block);
3729 /* Build a block containing code to initialize the arguments, the
3730 actual inline expansion of the body, and a label for the return
3731 statements within the function to jump to. The type of the
3732 statement expression is the return type of the function call. */
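/* A rough sketch of the expansion being assembled for 'x = foo (a)'
   (conceptual only; names like RETVAR are illustrative, and the real
   work happens in the helpers called below):

     <argument setup>   foo_parm = a;       -- initialize_inlined_parameters
     <body copy>        ... retvar = ...;   -- 'return e;' becomes an assignment
     <return block>     x = retvar;         -- the call is replaced later  */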
3733 id->block = make_node (BLOCK);
3734 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3735 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3736 prepend_lexical_block (gimple_block (stmt), id->block);
3738 /* Local declarations will be replaced by their equivalents in this
3739 map. */
3740 st = id->decl_map;
3741 id->decl_map = pointer_map_create ();
3742 dst = id->debug_map;
3743 id->debug_map = NULL;
3745 /* Record the function we are about to inline. */
3746 id->src_fn = fn;
3747 id->src_node = cg_edge->callee;
3748 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3749 id->gimple_call = stmt;
3751 gcc_assert (!id->src_cfun->after_inlining);
3753 id->entry_bb = bb;
3754 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3756 gimple_stmt_iterator si = gsi_last_bb (bb);
3757 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3758 NOT_TAKEN),
3759 GSI_NEW_STMT);
3761 initialize_inlined_parameters (id, stmt, fn, bb);
3763 if (DECL_INITIAL (fn))
3764 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3766 /* Return statements in the function body will be replaced by jumps
3767 to the RET_LABEL. */
3768 gcc_assert (DECL_INITIAL (fn));
3769 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3771 /* Find the LHS to which the result of this call is assigned. */
3772 return_slot = NULL;
3773 if (gimple_call_lhs (stmt))
3775 modify_dest = gimple_call_lhs (stmt);
3777 /* The function which we are inlining might not return a value,
3778 in which case we should issue a warning that the function
3779 does not return a value. In that case the optimizers will
3780 see that the variable to which the value is assigned was not
3781 initialized. We do not want to issue a warning about that
3782 uninitialized variable. */
3783 if (DECL_P (modify_dest))
3784 TREE_NO_WARNING (modify_dest) = 1;
3786 if (gimple_call_return_slot_opt_p (stmt))
3788 return_slot = modify_dest;
3789 modify_dest = NULL;
3792 else
3793 modify_dest = NULL;
3795 /* If we are inlining a call to the C++ operator new, we don't want
3796 to use type based alias analysis on the return value. Otherwise
3797 we may get confused if the compiler sees that the inlined new
3798 function returns a pointer which was just deleted. See bug
3799 33407. */
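/* Conceptual C++ illustration of the scenario (hypothetical names):
   after inlining operator new, the compiler might see

     delete p;
     q = new T;   -- may return the storage P pointed to

   and must not let type-based alias analysis conclude that *Q cannot
   alias the just-deleted object.  */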
3800 if (DECL_IS_OPERATOR_NEW (fn))
3802 return_slot = NULL;
3803 modify_dest = NULL;
3806 /* Declare the return variable for the function. */
3807 use_retvar = declare_return_variable (id, return_slot, modify_dest);
3809 /* Add local vars in this inlined callee to caller. */
3810 t_step = id->src_cfun->local_decls;
3811 for (; t_step; t_step = TREE_CHAIN (t_step))
3813 var = TREE_VALUE (t_step);
3814 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3816 if (var_ann (var) && add_referenced_var (var))
3817 cfun->local_decls = tree_cons (NULL_TREE, var,
3818 cfun->local_decls);
3820 else if (!can_be_nonlocal (var, id))
3821 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3822 cfun->local_decls);
3825 if (dump_file && (dump_flags & TDF_DETAILS))
3827 fprintf (dump_file, "Inlining ");
3828 print_generic_expr (dump_file, id->src_fn, 0);
3829 fprintf (dump_file, " to ");
3830 print_generic_expr (dump_file, id->dst_fn, 0);
3831 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3834 /* This is it. Duplicate the callee body. Assume callee is
3835 pre-gimplified. Note that we must not alter the caller
3836 function in any way before this point, as this CALL_EXPR may be
3837 a self-referential call; if we're calling ourselves, we need to
3838 duplicate our body before altering anything. */
3839 copy_body (id, bb->count,
3840 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3841 bb, return_block);
3843 /* Reset the escaped solution. */
3844 if (cfun->gimple_df)
3845 pt_solution_reset (&cfun->gimple_df->escaped);
3847 /* Clean up. */
3848 if (id->debug_map)
3850 pointer_map_destroy (id->debug_map);
3851 id->debug_map = dst;
3853 pointer_map_destroy (id->decl_map);
3854 id->decl_map = st;
3856 /* Unlink the call's virtual operands before replacing the statement. */
3857 unlink_stmt_vdef (stmt);
3859 /* If the inlined function returns a result that we care about,
3860 substitute the GIMPLE_CALL with an assignment of the return
3861 variable to the LHS of the call. That is, if STMT was
3862 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3863 if (use_retvar && gimple_call_lhs (stmt))
3865 gimple old_stmt = stmt;
3866 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3867 gsi_replace (&stmt_gsi, stmt, false);
3868 if (gimple_in_ssa_p (cfun))
3869 mark_symbols_for_renaming (stmt);
3870 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3872 else
3874 /* Handle the case of inlining a function with no return
3875 statement, which causes the return value to become undefined. */
3876 if (gimple_call_lhs (stmt)
3877 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3879 tree name = gimple_call_lhs (stmt);
3880 tree var = SSA_NAME_VAR (name);
3881 tree def = gimple_default_def (cfun, var);
3883 if (def)
3885 /* If the variable is used undefined, make this name
3886 undefined via a move. */
3887 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3888 gsi_replace (&stmt_gsi, stmt, true);
3890 else
3892 /* Otherwise make this variable undefined. */
3893 gsi_remove (&stmt_gsi, true);
3894 set_default_def (var, name);
3895 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3898 else
3899 gsi_remove (&stmt_gsi, true);
3902 if (purge_dead_abnormal_edges)
3903 gimple_purge_dead_abnormal_call_edges (return_block);
3905 /* If the value of the new expression is ignored, that's OK. We
3906 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3907 the equivalent inlined version either. */
3908 if (is_gimple_assign (stmt))
3910 gcc_assert (gimple_assign_single_p (stmt)
3911 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
3912 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3915 /* Output the inlining info for this abstract function, since it has been
3916 inlined. If we don't do this now, we can lose the information about the
3917 variables in the function when the blocks get blown away as soon as we
3918 remove the cgraph node. */
3919 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
3921 /* Update callgraph if needed. */
3922 cgraph_remove_node (cg_edge->callee);
3924 id->block = NULL_TREE;
3925 successfully_inlined = TRUE;
3927 egress:
3928 input_location = saved_location;
3929 return successfully_inlined;
3932 /* Expand call statements found in basic block BB.
3933 We can only have CALL_EXPRs as the "toplevel" tree code or nested
3934 in a MODIFY_EXPR. See tree-gimple.c:get_call_expr_in(). We can
3935 unfortunately not use that function here because we need a pointer
3936 to the CALL_EXPR, not the tree itself. */
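/* For illustration, the two statement shapes walked here are

     foo (a);       -- plain GIMPLE_CALL
     x = foo (a);   -- GIMPLE_CALL with an LHS

   and expand_call_inline replaces either form in place.  */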
3938 static bool
3939 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
3941 gimple_stmt_iterator gsi;
3943 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3945 gimple stmt = gsi_stmt (gsi);
3947 if (is_gimple_call (stmt)
3948 && expand_call_inline (bb, stmt, id))
3949 return true;
3952 return false;
3956 /* Walk all basic blocks created after FIRST and try to fold every statement
3957 in the STATEMENTS pointer set. */
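/* A hedged example of why builtins get special treatment below: folding

     tmp = strlen ("abc");

   yields the single statement 'tmp = 3', but other builtin folds can
   expand into several statements, each of which must be updated and
   reported to the callgraph.  */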
3959 static void
3960 fold_marked_statements (int first, struct pointer_set_t *statements)
3962 for (; first < n_basic_blocks; first++)
3963 if (BASIC_BLOCK (first))
3965 gimple_stmt_iterator gsi;
3967 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
3968 !gsi_end_p (gsi);
3969 gsi_next (&gsi))
3970 if (pointer_set_contains (statements, gsi_stmt (gsi)))
3972 gimple old_stmt = gsi_stmt (gsi);
3973 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
3975 if (old_decl && DECL_BUILT_IN (old_decl))
3977 /* Folding builtins can create multiple statements;
3978 we need to look at all of them. */
3979 gimple_stmt_iterator i2 = gsi;
3980 gsi_prev (&i2);
3981 if (fold_stmt (&gsi))
3983 gimple new_stmt;
3984 if (gsi_end_p (i2))
3985 i2 = gsi_start_bb (BASIC_BLOCK (first));
3986 else
3987 gsi_next (&i2);
3988 while (1)
3990 new_stmt = gsi_stmt (i2);
3991 update_stmt (new_stmt);
3992 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3993 new_stmt);
3995 if (new_stmt == gsi_stmt (gsi))
3997 /* It is okay to check only for the very last
3998 of these statements. If it is a throwing
3999 statement, nothing will change. If it isn't,
4000 this can remove EH edges. The only case this
4001 could be wrong is if some intermediate
4002 statement throws but the last one does not;
4003 that would mean we'd have to split the block,
4004 which we can't do here and would lose anyway.
4005 And as builtins probably never throw, this
4006 all is moot anyway. */
4007 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4008 new_stmt))
4009 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4010 break;
4012 gsi_next (&i2);
4016 else if (fold_stmt (&gsi))
4018 /* Re-read the statement from GSI as fold_stmt() may
4019 have changed it. */
4020 gimple new_stmt = gsi_stmt (gsi);
4021 update_stmt (new_stmt);
4023 if (is_gimple_call (old_stmt)
4024 || is_gimple_call (new_stmt))
4025 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4026 new_stmt);
4028 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4029 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4035 /* Return true if BB has at least one abnormal outgoing edge. */
4037 static inline bool
4038 has_abnormal_outgoing_edge_p (basic_block bb)
4040 edge e;
4041 edge_iterator ei;
4043 FOR_EACH_EDGE (e, ei, bb->succs)
4044 if (e->flags & EDGE_ABNORMAL)
4045 return true;
4047 return false;
4050 /* Expand calls to inline functions in the body of FN. */
4052 unsigned int
4053 optimize_inline_calls (tree fn)
4055 copy_body_data id;
4056 basic_block bb;
4057 int last = n_basic_blocks;
4058 struct gimplify_ctx gctx;
4060 /* There is no point in performing inlining if errors have already
4061 occurred -- and we might crash if we try to inline invalid
4062 code. */
4063 if (errorcount || sorrycount)
4064 return 0;
4066 /* Clear out ID. */
4067 memset (&id, 0, sizeof (id));
4069 id.src_node = id.dst_node = cgraph_node (fn);
4070 id.dst_fn = fn;
4071 /* Or any functions that aren't finished yet. */
4072 if (current_function_decl)
4073 id.dst_fn = current_function_decl;
4075 id.copy_decl = copy_decl_maybe_to_var;
4076 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4077 id.transform_new_cfg = false;
4078 id.transform_return_to_modify = true;
4079 id.transform_lang_insert_block = NULL;
4080 id.statements_to_fold = pointer_set_create ();
4082 push_gimplify_context (&gctx);
4084 /* We make no attempts to keep dominance info up-to-date. */
4085 free_dominance_info (CDI_DOMINATORS);
4086 free_dominance_info (CDI_POST_DOMINATORS);
4088 /* Register specific gimple functions. */
4089 gimple_register_cfg_hooks ();
4091 /* Reach the trees by walking over the CFG, and note the
4092 enclosing basic-blocks in the call edges. */
4093 /* We walk the blocks going forward, because inlined function bodies
4094 will split the current basic block, and the new blocks will
4095 follow it; we'll trudge through them, processing their CALL_EXPRs
4096 along the way. */
4097 FOR_EACH_BB (bb)
4098 gimple_expand_calls_inline (bb, &id);
4100 pop_gimplify_context (NULL);
4102 #ifdef ENABLE_CHECKING
4104 struct cgraph_edge *e;
4106 verify_cgraph_node (id.dst_node);
4108 /* Double check that we inlined everything we are supposed to inline. */
4109 for (e = id.dst_node->callees; e; e = e->next_callee)
4110 gcc_assert (e->inline_failed);
4112 #endif
4114 /* Fold the statements before compacting/renumbering the basic blocks. */
4115 fold_marked_statements (last, id.statements_to_fold);
4116 pointer_set_destroy (id.statements_to_fold);
4118 gcc_assert (!id.debug_stmts);
4120 /* Renumber the (code) basic_blocks consecutively. */
4121 compact_blocks ();
4122 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4123 number_blocks (fn);
4125 fold_cond_expr_cond ();
4126 delete_unreachable_blocks_update_callgraph (&id);
4127 #ifdef ENABLE_CHECKING
4128 verify_cgraph_node (id.dst_node);
4129 #endif
4131 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4132 not possible yet - the IPA passes might make various functions not
4133 throw, and they don't care to proactively update local EH info. This is
4134 done later in the fixup_cfg pass, which also executes the verification. */
4135 return (TODO_update_ssa
4136 | TODO_cleanup_cfg
4137 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4138 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4141 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
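/* A minimal usage sketch (hypothetical caller):

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);

   Expression nodes are duplicated; types, declarations and constants
   are shared with the original (see the tcc_* cases below).  */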
4143 tree
4144 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4146 enum tree_code code = TREE_CODE (*tp);
4147 enum tree_code_class cl = TREE_CODE_CLASS (code);
4149 /* We make copies of most nodes. */
4150 if (IS_EXPR_CODE_CLASS (cl)
4151 || code == TREE_LIST
4152 || code == TREE_VEC
4153 || code == TYPE_DECL
4154 || code == OMP_CLAUSE)
4156 /* Because the chain gets clobbered when we make a copy, we save it
4157 here. */
4158 tree chain = NULL_TREE, new_tree;
4160 chain = TREE_CHAIN (*tp);
4162 /* Copy the node. */
4163 new_tree = copy_node (*tp);
4165 /* Propagate mudflap marked-ness. */
4166 if (flag_mudflap && mf_marked_p (*tp))
4167 mf_mark (new_tree);
4169 *tp = new_tree;
4171 /* Now, restore the chain, if appropriate. That will cause
4172 walk_tree to walk into the chain as well. */
4173 if (code == PARM_DECL
4174 || code == TREE_LIST
4175 || code == OMP_CLAUSE)
4176 TREE_CHAIN (*tp) = chain;
4178 /* For now, we don't update BLOCKs when we make copies. So, we
4179 have to nullify all BIND_EXPRs. */
4180 if (TREE_CODE (*tp) == BIND_EXPR)
4181 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4183 else if (code == CONSTRUCTOR)
4185 /* CONSTRUCTOR nodes need special handling because
4186 we need to duplicate the vector of elements. */
4187 tree new_tree;
4189 new_tree = copy_node (*tp);
4191 /* Propagate mudflap marked-ness. */
4192 if (flag_mudflap && mf_marked_p (*tp))
4193 mf_mark (new_tree);
4195 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4196 CONSTRUCTOR_ELTS (*tp));
4197 *tp = new_tree;
4199 else if (TREE_CODE_CLASS (code) == tcc_type)
4200 *walk_subtrees = 0;
4201 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4202 *walk_subtrees = 0;
4203 else if (TREE_CODE_CLASS (code) == tcc_constant)
4204 *walk_subtrees = 0;
4205 else
4206 gcc_assert (code != STATEMENT_LIST);
4207 return NULL_TREE;
4210 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4211 information indicating to what new SAVE_EXPR this one should be mapped,
4212 use that one. Otherwise, create a new node and enter it in ST so that
4213 later copies of the same SAVE_EXPR reuse it. */
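/* Illustration: a SAVE_EXPR guarantees single evaluation, so both
   occurrences of the shared node in, say,

     SAVE_EXPR <n * 2> + SAVE_EXPR <n * 2>

   must be mapped to the *same* copy; the map ST below provides
   exactly that sharing.  */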
4215 static void
4216 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4218 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4219 tree *n;
4220 tree t;
4222 /* See if we already encountered this SAVE_EXPR. */
4223 n = (tree *) pointer_map_contains (st, *tp);
4225 /* If we didn't already remap this SAVE_EXPR, do so now. */
4226 if (!n)
4228 t = copy_node (*tp);
4230 /* Remember this SAVE_EXPR. */
4231 *pointer_map_insert (st, *tp) = t;
4232 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4233 *pointer_map_insert (st, t) = t;
4235 else
4237 /* We've already walked into this SAVE_EXPR; don't do it again. */
4238 *walk_subtrees = 0;
4239 t = *n;
4242 /* Replace this SAVE_EXPR with the copy. */
4243 *tp = t;
4246 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
4247 copies the declaration and enters it in the decl map in DATA (which is
4248 really a `copy_body_data *'). */
4250 static tree
4251 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4252 void *data)
4254 copy_body_data *id = (copy_body_data *) data;
4256 /* Don't walk into types. */
4257 if (TYPE_P (*tp))
4258 *walk_subtrees = 0;
4260 else if (TREE_CODE (*tp) == LABEL_EXPR)
4262 tree decl = TREE_OPERAND (*tp, 0);
4264 /* Copy the decl and remember the copy. */
4265 insert_decl_map (id, decl, id->copy_decl (decl, id));
4268 return NULL_TREE;
4271 /* Perform any modifications to EXPR required when it is unsaved. Does
4272 not recurse into EXPR's subtrees. */
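/* Sketch of the TARGET_EXPR case below, assuming the operand layout
   documented in tree.def (0 = slot, 1 = initializer, 2 = cleanup,
   3 = saved initializer): unsaving restores operand 1 from the
   pristine copy kept in operand 3, so the initializer can be
   expanded again.  */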
4274 static void
4275 unsave_expr_1 (tree expr)
4277 switch (TREE_CODE (expr))
4279 case TARGET_EXPR:
4280 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4281 It's OK for this to happen if it was part of a subtree that
4282 isn't immediately expanded, such as operand 2 of another
4283 TARGET_EXPR. */
4284 if (TREE_OPERAND (expr, 1))
4285 break;
4287 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4288 TREE_OPERAND (expr, 3) = NULL_TREE;
4289 break;
4291 default:
4292 break;
4296 /* Called via walk_tree when an expression is unsaved. Using the
4297 map pointed to by ST (which is really a `pointer_map_t *'),
4298 remaps all local declarations to appropriate replacements. */
4300 static tree
4301 unsave_r (tree *tp, int *walk_subtrees, void *data)
4303 copy_body_data *id = (copy_body_data *) data;
4304 struct pointer_map_t *st = id->decl_map;
4305 tree *n;
4307 /* Only a local declaration (variable or label). */
4308 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4309 || TREE_CODE (*tp) == LABEL_DECL)
4311 /* Lookup the declaration. */
4312 n = (tree *) pointer_map_contains (st, *tp);
4314 /* If it's there, remap it. */
4315 if (n)
4316 *tp = *n;
4319 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4320 gcc_unreachable ();
4321 else if (TREE_CODE (*tp) == BIND_EXPR)
4322 copy_bind_expr (tp, walk_subtrees, id);
4323 else if (TREE_CODE (*tp) == SAVE_EXPR
4324 || TREE_CODE (*tp) == TARGET_EXPR)
4325 remap_save_expr (tp, st, walk_subtrees);
4326 else
4328 copy_tree_r (tp, walk_subtrees, NULL);
4330 /* Do whatever unsaving is required. */
4331 unsave_expr_1 (*tp);
4334 /* Keep iterating. */
4335 return NULL_TREE;
4338 /* Copies everything in EXPR and replaces variables, labels
4339 and SAVE_EXPRs local to EXPR. */
4341 tree
4342 unsave_expr_now (tree expr)
4344 copy_body_data id;
4346 /* There's nothing to do for NULL_TREE. */
4347 if (expr == 0)
4348 return expr;
4350 /* Set up ID. */
4351 memset (&id, 0, sizeof (id));
4352 id.src_fn = current_function_decl;
4353 id.dst_fn = current_function_decl;
4354 id.decl_map = pointer_map_create ();
4355 id.debug_map = NULL;
4357 id.copy_decl = copy_decl_no_change;
4358 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4359 id.transform_new_cfg = false;
4360 id.transform_return_to_modify = false;
4361 id.transform_lang_insert_block = NULL;
4363 /* Walk the tree once to find local labels. */
4364 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4366 /* Walk the tree again, copying, remapping, and unsaving. */
4367 walk_tree (&expr, unsave_r, &id, NULL);
4369 /* Clean up. */
4370 pointer_map_destroy (id.decl_map);
4371 if (id.debug_map)
4372 pointer_map_destroy (id.debug_map);
4374 return expr;
4377 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4378 label, copies the declaration and enters it in the decl map in DATA (which
4379 is really a 'copy_body_data *'). */
4381 static tree
4382 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4383 bool *handled_ops_p ATTRIBUTE_UNUSED,
4384 struct walk_stmt_info *wi)
4386 copy_body_data *id = (copy_body_data *) wi->info;
4387 gimple stmt = gsi_stmt (*gsip);
4389 if (gimple_code (stmt) == GIMPLE_LABEL)
4391 tree decl = gimple_label_label (stmt);
4393 /* Copy the decl and remember the copy. */
4394 insert_decl_map (id, decl, id->copy_decl (decl, id));
4397 return NULL_TREE;
4401 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4402 Using the map pointed to by ST (which is really a `pointer_map_t *'),
4403 remaps all local declarations to appropriate replacements in gimple
4404 operands. */
4406 static tree
4407 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4409 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4410 copy_body_data *id = (copy_body_data *) wi->info;
4411 struct pointer_map_t *st = id->decl_map;
4412 tree *n;
4413 tree expr = *tp;
4415 /* Only a local declaration (variable or label). */
4416 if ((TREE_CODE (expr) == VAR_DECL
4417 && !TREE_STATIC (expr))
4418 || TREE_CODE (expr) == LABEL_DECL)
4420 /* Lookup the declaration. */
4421 n = (tree *) pointer_map_contains (st, expr);
4423 /* If it's there, remap it. */
4424 if (n)
4425 *tp = *n;
4426 *walk_subtrees = 0;
4428 else if (TREE_CODE (expr) == STATEMENT_LIST
4429 || TREE_CODE (expr) == BIND_EXPR
4430 || TREE_CODE (expr) == SAVE_EXPR)
4431 gcc_unreachable ();
4432 else if (TREE_CODE (expr) == TARGET_EXPR)
4434 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4435 It's OK for this to happen if it was part of a subtree that
4436 isn't immediately expanded, such as operand 2 of another
4437 TARGET_EXPR. */
4438 if (!TREE_OPERAND (expr, 1))
4440 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4441 TREE_OPERAND (expr, 3) = NULL_TREE;
4445 /* Keep iterating. */
4446 return NULL_TREE;
4450 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4451 Using the map pointed to by ST (which is really a `pointer_map_t *'),
4452 remaps all local declarations to appropriate replacements in gimple
4453 statements. */
4455 static tree
4456 replace_locals_stmt (gimple_stmt_iterator *gsip,
4457 bool *handled_ops_p ATTRIBUTE_UNUSED,
4458 struct walk_stmt_info *wi)
4460 copy_body_data *id = (copy_body_data *) wi->info;
4461 gimple stmt = gsi_stmt (*gsip);
4463 if (gimple_code (stmt) == GIMPLE_BIND)
4465 tree block = gimple_bind_block (stmt);
4467 if (block)
4469 remap_block (&block, id);
4470 gimple_bind_set_block (stmt, block);
4473 /* This will remap a lot of the same decls again, but this should be
4474 harmless. */
4475 if (gimple_bind_vars (stmt))
4476 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4479 /* Keep iterating. */
4480 return NULL_TREE;
4484 /* Copies everything in SEQ and replaces variables and labels local to
4485 current_function_decl. */
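/* A minimal usage sketch (hypothetical caller), e.g. duplicating the
   body of a GIMPLE_BIND within the same function:

     gimple_seq copy = copy_gimple_seq_and_replace_locals (seq);

   Non-static locals and labels in COPY become fresh declarations;
   statics and globals stay shared with the original.  */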
4487 gimple_seq
4488 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4490 copy_body_data id;
4491 struct walk_stmt_info wi;
4492 struct pointer_set_t *visited;
4493 gimple_seq copy;
4495 /* There's nothing to do for a NULL sequence. */
4496 if (seq == NULL)
4497 return seq;
4499 /* Set up ID. */
4500 memset (&id, 0, sizeof (id));
4501 id.src_fn = current_function_decl;
4502 id.dst_fn = current_function_decl;
4503 id.decl_map = pointer_map_create ();
4504 id.debug_map = NULL;
4506 id.copy_decl = copy_decl_no_change;
4507 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4508 id.transform_new_cfg = false;
4509 id.transform_return_to_modify = false;
4510 id.transform_lang_insert_block = NULL;
4512 /* Walk the tree once to find local labels. */
4513 memset (&wi, 0, sizeof (wi));
4514 visited = pointer_set_create ();
4515 wi.info = &id;
4516 wi.pset = visited;
4517 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4518 pointer_set_destroy (visited);
4520 copy = gimple_seq_copy (seq);
4522 /* Walk the copy, remapping decls. */
4523 memset (&wi, 0, sizeof (wi));
4524 wi.info = &id;
4525 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4527 /* Clean up. */
4528 pointer_map_destroy (id.decl_map);
4529 if (id.debug_map)
4530 pointer_map_destroy (id.debug_map);
4532 return copy;
4536 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4538 static tree
4539 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4541 if (*tp == data)
4542 return (tree) data;
4543 else
4544 return NULL;
4547 bool
4548 debug_find_tree (tree top, tree search)
4550 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4554 /* Declare the variables created by the inliner. Add all the variables in
4555 VARS to BLOCK. */
4557 static void
4558 declare_inline_vars (tree block, tree vars)
4560 tree t;
4561 for (t = vars; t; t = TREE_CHAIN (t))
4563 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4564 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4565 cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
4568 if (block)
4569 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4572 /* Finish the copy COPY of DECL. The DECL was originally in id->src_fn,
4573 but the copy will now live in id->dst_fn. This is the common tail of
4574 the copy_decl_* routines. */
4576 static tree
4577 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4579 /* Don't generate debug information for the copy if we wouldn't have
4580 generated it for the original. */
4581 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4582 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4584 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4585 declaration inspired this copy. */
4586 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4588 /* The new variable/label has no RTL, yet. */
4589 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4590 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4591 SET_DECL_RTL (copy, NULL_RTX);
4593 /* These args would always appear unused, if not for this. */
4594 TREE_USED (copy) = 1;
4596 /* Set the context for the new declaration. */
4597 if (!DECL_CONTEXT (decl))
4598 /* Globals stay global. */
4600 else if (DECL_CONTEXT (decl) != id->src_fn)
4601 /* Things that weren't in the scope of the function we're inlining
4602 from aren't in the scope we're inlining to, either. */
4604 else if (TREE_STATIC (decl))
4605 /* Function-scoped static variables should stay in the original
4606 function. */
4608 else
4609 /* Ordinary automatic local variables are now in the scope of the
4610 new function. */
4611 DECL_CONTEXT (copy) = id->dst_fn;
4613 return copy;
4616 static tree
4617 copy_decl_to_var (tree decl, copy_body_data *id)
4619 tree copy, type;
4621 gcc_assert (TREE_CODE (decl) == PARM_DECL
4622 || TREE_CODE (decl) == RESULT_DECL);
4624 type = TREE_TYPE (decl);
4626 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4627 VAR_DECL, DECL_NAME (decl), type);
4628 if (DECL_PT_UID_SET_P (decl))
4629 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4630 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4631 TREE_READONLY (copy) = TREE_READONLY (decl);
4632 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4633 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4635 return copy_decl_for_dup_finish (id, decl, copy);
4638 /* Like copy_decl_to_var, but create a return slot object instead of a
4639 pointer variable for return by invisible reference. */
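/* Roughly, for a callee such as

     struct big f (void);   -- returned by invisible reference

   the RESULT_DECL has DECL_BY_REFERENCE set and pointer type
   'struct big *'; the variable created here gets the pointed-to type
   'struct big', i.e. an actual return slot.  */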
4641 static tree
4642 copy_result_decl_to_var (tree decl, copy_body_data *id)
4644 tree copy, type;
4646 gcc_assert (TREE_CODE (decl) == PARM_DECL
4647 || TREE_CODE (decl) == RESULT_DECL);
4649 type = TREE_TYPE (decl);
4650 if (DECL_BY_REFERENCE (decl))
4651 type = TREE_TYPE (type);
4653 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4654 VAR_DECL, DECL_NAME (decl), type);
4655 if (DECL_PT_UID_SET_P (decl))
4656 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4657 TREE_READONLY (copy) = TREE_READONLY (decl);
4658 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4659 if (!DECL_BY_REFERENCE (decl))
4661 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4662 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4665 return copy_decl_for_dup_finish (id, decl, copy);
4668 tree
4669 copy_decl_no_change (tree decl, copy_body_data *id)
4671 tree copy;
4673 copy = copy_node (decl);
4675 /* The COPY is not abstract; it will be generated in DST_FN. */
4676 DECL_ABSTRACT (copy) = 0;
4677 lang_hooks.dup_lang_specific_decl (copy);
4679 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4680 been taken; it's for internal bookkeeping in expand_goto_internal. */
4681 if (TREE_CODE (copy) == LABEL_DECL)
4683 TREE_ADDRESSABLE (copy) = 0;
4684 LABEL_DECL_UID (copy) = -1;
4687 return copy_decl_for_dup_finish (id, decl, copy);
4690 static tree
4691 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4693 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4694 return copy_decl_to_var (decl, id);
4695 else
4696 return copy_decl_no_change (decl, id);
4699 /* Return a copy of the function's argument tree. */
4700 static tree
4701 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4702 bitmap args_to_skip, tree *vars)
4704 tree arg, *parg;
4705 tree new_parm = NULL;
4706 int i = 0;
4708 parg = &new_parm;
4710 for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4711 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4713 tree new_tree = remap_decl (arg, id);
4714 lang_hooks.dup_lang_specific_decl (new_tree);
4715 *parg = new_tree;
4716 parg = &TREE_CHAIN (new_tree);
4718 else if (!pointer_map_contains (id->decl_map, arg))
4720 /* Make an equivalent VAR_DECL. If the argument was used
4721 as a temporary variable later in the function, its uses will be
4722 replaced by the local variable. */
4723 tree var = copy_decl_to_var (arg, id);
4724 get_var_ann (var);
4725 add_referenced_var (var);
4726 insert_decl_map (id, arg, var);
4727 /* Declare this new variable. */
4728 TREE_CHAIN (var) = *vars;
4729 *vars = var;
4731 return new_parm;
4734 /* Return a copy of the function's static chain. */
4735 static tree
4736 copy_static_chain (tree static_chain, copy_body_data * id)
4738 tree *chain_copy, *pvar;
4740 chain_copy = &static_chain;
4741 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4743 tree new_tree = remap_decl (*pvar, id);
4744 lang_hooks.dup_lang_specific_decl (new_tree);
4745 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4746 *pvar = new_tree;
4748 return static_chain;
4751 /* Return true if the function is allowed to be versioned.
4752 This is a guard for the versioning functionality. */
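/* For illustration, a function declared

     __attribute__ ((noclone)) int f (int);

   is not versionable, nor is any function whose body copy_forbidden
   rejects.  */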
4754 bool
4755 tree_versionable_function_p (tree fndecl)
4757 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4758 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4761 /* Delete all unreachable basic blocks and update callgraph.
4762 Doing so is somewhat nontrivial because we need to update all clones and
4763 remove inline functions that become unreachable. */
4765 static bool
4766 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4768 bool changed = false;
4769 basic_block b, next_bb;
4771 find_unreachable_blocks ();
4773 /* Delete all unreachable basic blocks. */
4775 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4777 next_bb = b->next_bb;
4779 if (!(b->flags & BB_REACHABLE))
4781 gimple_stmt_iterator bsi;
4783 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4784 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4786 struct cgraph_edge *e;
4787 struct cgraph_node *node;
4789 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4791 if (!e->inline_failed)
4792 cgraph_remove_node_and_inline_clones (e->callee);
4793 else
4794 cgraph_remove_edge (e);
4796 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4797 && id->dst_node->clones)
4798 for (node = id->dst_node->clones; node != id->dst_node;)
4800 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4802 if (!e->inline_failed)
4803 cgraph_remove_node_and_inline_clones (e->callee);
4804 else
4805 cgraph_remove_edge (e);
4808 if (node->clones)
4809 node = node->clones;
4810 else if (node->next_sibling_clone)
4811 node = node->next_sibling_clone;
4812 else
4814 while (node != id->dst_node && !node->next_sibling_clone)
4815 node = node->clone_of;
4816 if (node != id->dst_node)
4817 node = node->next_sibling_clone;
4821 delete_basic_block (b);
4822 changed = true;
4826 if (changed)
4827 tidy_fallthru_edges ();
4828 return changed;
4831 /* Update clone info after duplication. */
4833 static void
4834 update_clone_info (copy_body_data * id)
4836 struct cgraph_node *node;
4837 if (!id->dst_node->clones)
4838 return;
4839 for (node = id->dst_node->clones; node != id->dst_node;)
4841 /* First update replace maps to match the new body. */
4842 if (node->clone.tree_map)
4844 unsigned int i;
4845 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4847 struct ipa_replace_map *replace_info;
4848 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4849 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4850 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4853 if (node->clones)
4854 node = node->clones;
4855 else if (node->next_sibling_clone)
4856 node = node->next_sibling_clone;
4857 else
4859 while (node != id->dst_node && !node->next_sibling_clone)
4860 node = node->clone_of;
4861 if (node != id->dst_node)
4862 node = node->next_sibling_clone;
4867 /* Create a copy of a function's tree.
4868 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4869 of the original function and the new copied function
4870 respectively. In case we want to replace a DECL
4871 tree with another tree while duplicating the function's
4872 body, TREE_MAP represents the mapping between these
4873 trees. If UPDATE_CLONES is set, the call_stmt fields
4874 of edges of clones of the function will be updated. */
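/* A rough illustration of the effect (hypothetical decls): given

     int f (int x) { return x + 1; }

   a TREE_MAP entry replacing X with the constant 5 yields a new
   function that behaves like

     int f_clone (void) { return 5 + 1; }

   which is what IPA constant propagation requests when it creates a
   specialized clone.  */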
4875 void
4876 tree_function_versioning (tree old_decl, tree new_decl,
4877 VEC(ipa_replace_map_p,gc)* tree_map,
4878 bool update_clones, bitmap args_to_skip)
4880 struct cgraph_node *old_version_node;
4881 struct cgraph_node *new_version_node;
4882 copy_body_data id;
4883 tree p;
4884 unsigned i;
4885 struct ipa_replace_map *replace_info;
4886 basic_block old_entry_block, bb;
4887 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4889 tree t_step;
4890 tree old_current_function_decl = current_function_decl;
4891 tree vars = NULL_TREE;
4893 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4894 && TREE_CODE (new_decl) == FUNCTION_DECL);
4895 DECL_POSSIBLY_INLINED (old_decl) = 1;
4897 old_version_node = cgraph_node (old_decl);
4898 new_version_node = cgraph_node (new_decl);
4900 /* Output the inlining info for this abstract function, since it has been
4901 inlined. If we don't do this now, we can lose the information about the
4902 variables in the function when the blocks get blown away as soon as we
4903 remove the cgraph node. */
4904 (*debug_hooks->outlining_inline_function) (old_decl);
4906 DECL_ARTIFICIAL (new_decl) = 1;
4907 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
4908 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
4910 /* Prepare the data structures for the tree copy. */
4911 memset (&id, 0, sizeof (id));
4913 /* Prepare the set of statements that will need folding after copying. */
4914 id.statements_to_fold = pointer_set_create ();
4916 id.decl_map = pointer_map_create ();
4917 id.debug_map = NULL;
4918 id.src_fn = old_decl;
4919 id.dst_fn = new_decl;
4920 id.src_node = old_version_node;
4921 id.dst_node = new_version_node;
4922 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
4923 if (id.src_node->ipa_transforms_to_apply)
4925 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
4926 unsigned int i;
4928 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
4929 id.src_node->ipa_transforms_to_apply);
4930 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
4931 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
4932 VEC_index (ipa_opt_pass,
4933 old_transforms_to_apply,
4934 i));
4937 id.copy_decl = copy_decl_no_change;
4938 id.transform_call_graph_edges
4939 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
4940 id.transform_new_cfg = true;
4941 id.transform_return_to_modify = false;
4942 id.transform_lang_insert_block = NULL;
4944 current_function_decl = new_decl;
4945 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
4946 (DECL_STRUCT_FUNCTION (old_decl));
4947 initialize_cfun (new_decl, old_decl,
4948 old_entry_block->count);
4949 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
4950 = id.src_cfun->gimple_df->ipa_pta;
4951 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
4953 /* Copy the function's static chain. */
4954 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
4955 if (p)
4956 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
4957 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
4958 &id);
4960 /* If there's a tree_map, prepare for substitution. */
4961 if (tree_map)
4962 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
4964 gimple init;
4965 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
4966 if (replace_info->replace_p)
4968 tree op = replace_info->new_tree;
4969 if (!replace_info->old_tree)
4971 int i = replace_info->parm_num;
4972 tree parm;
4973 for (parm = DECL_ARGUMENTS (old_decl); i; parm = TREE_CHAIN (parm))
4974 i --;
4975 replace_info->old_tree = parm;
4979 STRIP_NOPS (op);
4981 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
4982 op = TREE_OPERAND (op, 0);
4984 if (TREE_CODE (op) == ADDR_EXPR)
4986 op = TREE_OPERAND (op, 0);
4987 while (handled_component_p (op))
4988 op = TREE_OPERAND (op, 0);
4989 if (TREE_CODE (op) == VAR_DECL)
4990 add_referenced_var (op);
4992 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
4993 init = setup_one_parameter (&id, replace_info->old_tree,
4994 replace_info->new_tree, id.src_fn,
4995 NULL,
4996 &vars);
4997 if (init)
4998 VEC_safe_push (gimple, heap, init_stmts, init);
5001 /* Copy the function's arguments. */
5002 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5003 DECL_ARGUMENTS (new_decl) =
5004 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5005 args_to_skip, &vars);
5007 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5009 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5010 number_blocks (id.dst_fn);
5012 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5014 if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
5015 /* Add local vars. */
5016 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
5017 t_step; t_step = TREE_CHAIN (t_step))
5019 tree var = TREE_VALUE (t_step);
5020 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
5021 cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
5022 else if (!can_be_nonlocal (var, &id))
5023 cfun->local_decls =
5024 tree_cons (NULL_TREE, remap_decl (var, &id),
5025 cfun->local_decls);
5028 /* Copy the function's body. */
5029 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5030 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
5032 if (DECL_RESULT (old_decl) != NULL_TREE)
5034 tree *res_decl = &DECL_RESULT (old_decl);
5035 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
5036 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5039 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5040 number_blocks (new_decl);
5042 /* We want to create the BB unconditionally, so that the addition of
5043 debug stmts doesn't affect BB count, which may in the end cause
5044 codegen differences. */
5045 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5046 while (VEC_length (gimple, init_stmts))
5047 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
5048 update_clone_info (&id);
5050 /* Remap the nonlocal_goto_save_area, if any. */
5051 if (cfun->nonlocal_goto_save_area)
5053 struct walk_stmt_info wi;
5055 memset (&wi, 0, sizeof (wi));
5056 wi.info = &id;
5057 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5060 /* Clean up. */
5061 pointer_map_destroy (id.decl_map);
5062 if (id.debug_map)
5063 pointer_map_destroy (id.debug_map);
5064 free_dominance_info (CDI_DOMINATORS);
5065 free_dominance_info (CDI_POST_DOMINATORS);
5067 fold_marked_statements (0, id.statements_to_fold);
5068 pointer_set_destroy (id.statements_to_fold);
5069 fold_cond_expr_cond ();
5070 delete_unreachable_blocks_update_callgraph (&id);
5071 if (id.dst_node->analyzed)
5072 cgraph_rebuild_references ();
5073 update_ssa (TODO_update_ssa);
5074 free_dominance_info (CDI_DOMINATORS);
5075 free_dominance_info (CDI_POST_DOMINATORS);
5077 gcc_assert (!id.debug_stmts);
5078 VEC_free (gimple, heap, init_stmts);
5079 pop_cfun ();
5080 current_function_decl = old_current_function_decl;
5081 gcc_assert (!current_function_decl
5082 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
5083 return;
5086 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5087 the callee and return the inlined body on success. */
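/* Illustration (hypothetical decl): with

     __attribute__ ((const)) int sq (int i) { return i * i; }

   a GENERIC CALL_EXPR 'sq (n)' may be replaced here by the inlined
   body expression 'n * n', provided DECL_SAVED_TREE is still
   available.  */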
5089 tree
5090 maybe_inline_call_in_expr (tree exp)
5092 tree fn = get_callee_fndecl (exp);
5094 /* We can only try to inline "const" functions. */
5095 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5097 struct pointer_map_t *decl_map = pointer_map_create ();
5098 call_expr_arg_iterator iter;
5099 copy_body_data id;
5100 tree param, arg, t;
5102 /* Remap the parameters. */
5103 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5104 param;
5105 param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
5106 *pointer_map_insert (decl_map, param) = arg;
5108 memset (&id, 0, sizeof (id));
5109 id.src_fn = fn;
5110 id.dst_fn = current_function_decl;
5111 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5112 id.decl_map = decl_map;
5114 id.copy_decl = copy_decl_no_change;
5115 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5116 id.transform_new_cfg = false;
5117 id.transform_return_to_modify = true;
5118 id.transform_lang_insert_block = false;
5120 /* Make sure not to unshare trees behind the front-end's back
5121 since front-end specific mechanisms may rely on sharing. */
5122 id.regimplify = false;
5123 id.do_not_unshare = true;
5125 /* We're not inside any EH region. */
5126 id.eh_lp_nr = 0;
5128 t = copy_tree_body (&id);
5129 pointer_map_destroy (decl_map);
5131 /* We can only return something suitable for use in a GENERIC
5132 expression tree. */
5133 if (TREE_CODE (t) == MODIFY_EXPR)
5134 return TREE_OPERAND (t, 1);
5137 return NULL_TREE;
5140 /* Duplicate a type, fields and all. */
5142 tree
5143 build_duplicate_type (tree type)
5145 struct copy_body_data id;
5147 memset (&id, 0, sizeof (id));
5148 id.src_fn = current_function_decl;
5149 id.dst_fn = current_function_decl;
5150 id.src_cfun = cfun;
5151 id.decl_map = pointer_map_create ();
5152 id.debug_map = NULL;
5153 id.copy_decl = copy_decl_no_change;
5155 type = remap_type_1 (type, &id);
5157 pointer_map_destroy (id.decl_map);
5158 if (id.debug_map)
5159 pointer_map_destroy (id.debug_map);
5161 TYPE_CANONICAL (type) = type;
5163 return type;
5166 /* Return whether it is safe to inline a function: inlining is rejected if
5167 the callee used incompatible target-specific options or if the call-site
5168 actual argument types mismatch the parameter types. E is the call edge to be checked. */
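/* Illustration of the argument-mismatch case rejected below
   (hypothetical C):

     int f ();            -- declared without a prototype
     double d = 1.5;
     ... f (d) ...        -- 'double' actual vs. 'int' formal in f's body

   Such a call is given CIF_MISMATCHED_ARGUMENTS instead of being
   inlined.  */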
5169 bool
5170 tree_can_inline_p (struct cgraph_edge *e)
5172 #if 0
5173 /* This causes a regression in SPEC in that it prevents a cold function from
5174 inlining a hot function. Perhaps this should only apply to functions
5175 that the user declares hot/cold/optimize explicitly. */
5177 /* Don't inline a function with a higher optimization level than the
5178 caller, or with different space constraints (hot/cold functions). */
5179 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
5180 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
5182 if (caller_tree != callee_tree)
5184 struct cl_optimization *caller_opt
5185 = TREE_OPTIMIZATION ((caller_tree)
5186 ? caller_tree
5187 : optimization_default_node);
5189 struct cl_optimization *callee_opt
5190 = TREE_OPTIMIZATION ((callee_tree)
5191 ? callee_tree
5192 : optimization_default_node);
5194 if ((caller_opt->optimize > callee_opt->optimize)
5195 || (caller_opt->optimize_size != callee_opt->optimize_size))
5196 return false;
5198 #endif
5199 tree caller, callee, lhs;
5201 caller = e->caller->decl;
5202 callee = e->callee->decl;
5204 /* First check that inlining isn't simply forbidden in this case. */
5205 if (inline_forbidden_into_p (caller, callee))
5207 e->inline_failed = CIF_UNSPECIFIED;
5208 gimple_call_set_cannot_inline (e->call_stmt, true);
5209 return false;
5212 /* Allow the backend to decide if inlining is ok. */
5213 if (!targetm.target_option.can_inline_p (caller, callee))
5215 e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5216 gimple_call_set_cannot_inline (e->call_stmt, true);
5217 e->call_stmt_cannot_inline_p = true;
5218 return false;
5221 /* Do not inline calls where we cannot trivially work around mismatches
5222 in argument or return types. */
5223 if (e->call_stmt
5224 && ((DECL_RESULT (callee)
5225 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
5226 && (lhs = gimple_call_lhs (e->call_stmt)) != NULL_TREE
5227 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
5228 TREE_TYPE (lhs))
5229 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
5230 || !gimple_check_call_args (e->call_stmt)))
5232 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5233 gimple_call_set_cannot_inline (e->call_stmt, true);
5234 e->call_stmt_cannot_inline_p = true;
5235 return false;
5238 return true;