/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "tree.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "integrate.h"

#include "rtl.h"	/* FIXME: For asm_str_count.  */

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"
/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being merged into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
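
/* Illustrative sketch (editorial example, not part of the original
   source): given a callee

       int inc (int p) { return p + 1; }

   inlining "x = inc (y);" conceptually produces in the caller

       int p.1;              // PARM_DECL remapped to a VAR_DECL
       int retval.2;         // dedicated returned-value variable
       p.1 = y;
       retval.2 = p.1 + 1;   // RETURN_EXPR became a MODIFY_EXPR
       x = retval.2;

   with the return's branch semantics handled by CFG edges rather than
   by an explicit return statement.  The names p.1 and retval.2 are
   hypothetical.  */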
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;
/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, this mapping is used for more
   than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}
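
/* Illustrative sketch (editorial example, not part of the original
   source): suppose the callee's PARM_DECL "p" is remapped to a fresh
   VAR_DECL "p.1".  After

       insert_decl_map (id, p, p_1);

   the map contains both p -> p.1 and p.1 -> p.1, so if a later walk
   over already-copied trees encounters p.1 itself, the lookup returns
   p.1 unchanged instead of copying it a second time.  */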
/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}
/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return name;
    }

  /* Do not set DEF_STMT yet, as the statement has not been copied yet.
     We do that in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might have substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from needing to introduce a PHI node
     in case the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
	{
	  /* By inlining a function with an uninitialized variable, we
	     might extend its lifetime (the variable might get reused).
	     This causes an ICE if we end up extending the lifetime of an
	     SSA name across an abnormal edge, and it also increases
	     register pressure.

	     We simply initialize all uninitialized vars to 0, except
	     when we are inlining into the very first BB.  We could avoid
	     this for all BBs that are not inside strongly connected
	     regions of the CFG, but that is expensive to test.  */
	  if (id->entry_bb
	      && is_gimple_reg (SSA_NAME_VAR (name))
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
	      && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple init_stmt;
	      tree zero = build_zero_cst (TREE_TYPE (new_tree));

	      init_stmt = gimple_build_assign (new_tree, zero);
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
		  == name)
		set_default_def (SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
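
/* Illustrative sketch (editorial example, not part of the original
   source): if the callee reads a variable it never initialized, e.g.

       int f (void) { int u; return u; }

   the copied SSA name for "u" has a GIMPLE_NOP definition.  When the
   name occurs in an abnormal PHI and we are not inlining into the very
   first BB, the code above emits "u.1 = 0;" at the end of
   id->entry_bb, so the copy has a real definition and its lifetime is
   not extended across an abnormal edge.  The name u.1 is
   hypothetical.  */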
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      if ((TREE_CODE (t) == VAR_DECL
	   || TREE_CODE (t) == RESULT_DECL
	   || TREE_CODE (t) == PARM_DECL)
	  && id->src_fn && DECL_STRUCT_FUNCTION (id->src_fn)
	  && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
	  /* We don't want to mark as referenced VAR_DECLs that were
	     not marked as such in the src function.  */
	  && (TREE_CODE (decl) != VAR_DECL
	      || referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
					DECL_UID (decl))))
	add_referenced_var (t);
      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					      TYPE_MODE (type),
					      TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
						TYPE_MODE (type),
						TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f, nf = NULL;

	for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
	  {
	    t = remap_decl (f, id);
	    DECL_CONTEXT (t) = new_tree;
	    DECL_CHAIN (t) = nf;
	    nf = t;
	  }
	TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
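
/* Illustrative sketch (editorial example, not part of the original
   source): a variably modified type is one whose size depends on a
   decl of the source function, e.g. the type of "buf" in

       void callee (int n) { char buf[n]; ... }

   Its TYPE_SIZE refers to the PARM_DECL "n", so when the body is
   duplicated the type itself must be copied with the size expression
   remapped to the caller's copy of "n"; types with constant size are
   simply mapped to themselves.  */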
/* Return previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}
static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  if (TREE_CODE (old_var) == VAR_DECL
	      && ! DECL_EXTERNAL (old_var)
	      && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
	    add_local_decl (cfun, old_var);
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
	{
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  DECL_CHAIN (new_var) = new_decls;
	  new_decls = new_var;

	  /* Also copy value-expressions.  */
	  if (TREE_CODE (new_var) == VAR_DECL
	      && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree tem = DECL_VALUE_EXPR (new_var);
	      bool old_regimplify = id->regimplify;
	      id->remapping_type_depth++;
	      walk_tree (&tem, copy_tree_body_r, id, NULL);
	      id->remapping_type_depth--;
	      id->regimplify = old_regimplify;
	      SET_DECL_VALUE_EXPR (new_var, tem);
	    }
	}
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
/* Copy the whole block tree and root it in id->block.  */

static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
	/* This copy is not redundant; tsi_link_after will smash this
	   STATEMENT_LIST into the end of the one we're building, and we
	   don't want to do that with the original.  */
	copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}
/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}
/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses a zero void * pointer to initialize
	 objects of any other type.  This confuses the middle-end type
	 verification.  As cloned bodies do not go through gimplification
	 again, the fixup there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
	gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) == MEM_REF)
	{
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  tree tem;

	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  if ((tem = maybe_fold_offset_to_reference (EXPR_LOCATION (*tp),
						     ptr,
						     TREE_OPERAND (*tp, 1),
						     type))
	      && TREE_THIS_VOLATILE (tem) == TREE_THIS_VOLATILE (old))
	    {
	      tree *tem_basep = &tem;
	      while (handled_component_p (*tem_basep))
		tem_basep = &TREE_OPERAND (*tem_basep, 0);
	      if (TREE_CODE (*tem_basep) == MEM_REF)
		*tem_basep
		  = build2 (MEM_REF, TREE_TYPE (*tem_basep),
			    TREE_OPERAND (*tem_basep, 0),
			    fold_convert (TREE_TYPE (TREE_OPERAND (*tp, 1)),
					  TREE_OPERAND (*tem_basep, 1)));
	      else
		*tem_basep
		  = build2 (MEM_REF, TREE_TYPE (*tem_basep),
			    build_fold_addr_expr (*tem_basep),
			    build_int_cst
			      (TREE_TYPE (TREE_OPERAND (*tp, 1)), 0));
	      *tp = tem;
	    }
	  else
	    {
	      *tp = fold_build2 (MEM_REF, type,
				 ptr, TREE_OPERAND (*tp, 1));
	      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	      TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
	    }
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* Global variables we haven't seen yet need to go into referenced
	 vars, unless they are referenced only from types.  */
      if (gimple_in_ssa_p (cfun)
	  && TREE_CODE (*tp) == VAR_DECL
	  && id->remapping_type_depth == 0
	  && !processing_debug_stmt)
	add_referenced_var (*tp);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     consider the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  But make sure
	     not to improperly set TREE_BLOCK on some sub-expressions.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  tree block = id->block;
	  id->block = NULL_TREE;
	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
	  id->block = block;
	  recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
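
/* Illustrative sketch (editorial example, not part of the original
   source): if the caller passes "&a" for a pointer parameter "p",
   then after substitution a callee access

       MEM_REF[p, 8]     becomes     MEM_REF[&a, 8]

   which maybe_fold_offset_to_reference can re-canonicalize into a
   direct component reference into "a" (e.g. a field at offset 8),
   provided the volatility of the folded form matches the original
   access.  */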
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      tree new_tree;
	      tree old;
	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
		 does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (TREE_TYPE (*n));
	      if (id->do_not_unshare)
		new_tree = *n;
	      else
		new_tree = unshare_expr (*n);
	      old = *tp;
	      *tp = gimple_fold_indirect_ref (new_tree);
	      if (! *tp)
		{
		  if (TREE_CODE (new_tree) == ADDR_EXPR)
		    {
		      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
						 type, new_tree);
		      /* ???  We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (new_tree, 0);
		    }
		  else
		    {
		      *tp = build1 (INDIRECT_REF, type, new_tree);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      tree old = *tp;
	      *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
				 unshare_expr (*n), TREE_OPERAND (*tp, 1));
	      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
	 vars, unless they are referenced only from types or debug stmts.  */
      if (gimple_in_ssa_p (cfun)
	  && TREE_CODE (*tp) == VAR_DECL
	  && id->remapping_type_depth == 0
	  && !processing_debug_stmt)
	add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
	 When inlining we want EXPRs without a block to appear in the block
	 of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = (tree *) pointer_map_contains (id->decl_map,
						 TREE_BLOCK (*tp));
	      gcc_assert (n || id->remapping_type_depth != 0);
	      if (n)
		new_block = *n;
	    }
	  TREE_BLOCK (*tp) = new_block;
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular,
	 consider the INDIRECT_REF substitution above.  Make sure that
	 TREE_CONSTANT and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
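
/* Illustrative sketch (editorial example, not part of the original
   source): when versioning propagates the constant 0 into a parameter
   "p", a callee statement "p = p;" would become "0 = 0;" after
   remapping.  The MODIFY_EXPR check above recognizes such self
   assignments whose mapped value is constant or read-only and replaces
   them with an empty statement instead.  */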
/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}
/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}
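
/* Illustrative sketch (editorial example, not part of the original
   source): the callee's EH regions are duplicated into the caller, so
   region numbers embedded in statements must follow.  If callee region
   1 was duplicated as caller region 7, then a copied call

       __builtin_eh_pointer (1)   is rewritten to   __builtin_eh_pointer (7)

   via the INTEGER_CST variant above, while GIMPLE_RESX and
   GIMPLE_EH_DISPATCH carry the region number directly and go through
   remap_eh_region_nr.  The region numbers 1 and 7 are hypothetical.  */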
/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If RETVAL is just the result decl, the result decl has
	 already been set (e.g. a recent "foo (&result_decl, ...)");
	 just toss the entire GIMPLE_RETURN.  */
      if (retval
	  && (TREE_CODE (retval) != RESULT_DECL
	      && (TREE_CODE (retval) != SSA_NAME
		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
	{
	  copy = gimple_build_assign (id->retvar, retval);
	  /* id->retvar is already substituted.  Skip it on later remapping.  */
	  skip_first = true;
	}
      else
	return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
	 have embedded statements.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  copy = copy_gimple_bind (stmt, id);
	  break;

	case GIMPLE_CATCH:
	  s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
	  copy = gimple_build_catch (gimple_catch_types (stmt), s1);
	  break;

	case GIMPLE_EH_FILTER:
	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
	  break;

	case GIMPLE_TRY:
	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
	  copy = gimple_build_wce (s1);
	  break;

	case GIMPLE_OMP_PARALLEL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_parallel
		   (s1,
		    gimple_omp_parallel_clauses (stmt),
		    gimple_omp_parallel_child_fn (stmt),
		    gimple_omp_parallel_data_arg (stmt));
	  break;

	case GIMPLE_OMP_TASK:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_task
		   (s1,
		    gimple_omp_task_clauses (stmt),
		    gimple_omp_task_child_fn (stmt),
		    gimple_omp_task_data_arg (stmt),
		    gimple_omp_task_copy_fn (stmt),
		    gimple_omp_task_arg_size (stmt),
		    gimple_omp_task_arg_align (stmt));
	  break;

	case GIMPLE_OMP_FOR:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
	  copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
				       gimple_omp_for_collapse (stmt), s2);
	  {
	    size_t i;
	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	      {
		gimple_omp_for_set_index (copy, i,
					  gimple_omp_for_index (stmt, i));
		gimple_omp_for_set_initial (copy, i,
					    gimple_omp_for_initial (stmt, i));
		gimple_omp_for_set_final (copy, i,
					  gimple_omp_for_final (stmt, i));
		gimple_omp_for_set_incr (copy, i,
					 gimple_omp_for_incr (stmt, i));
		gimple_omp_for_set_cond (copy, i,
					 gimple_omp_for_cond (stmt, i));
	      }
	  }
	  break;

	case GIMPLE_OMP_MASTER:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_master (s1);
	  break;

	case GIMPLE_OMP_ORDERED:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_ordered (s1);
	  break;

	case GIMPLE_OMP_SECTION:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_section (s1);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_sections
		   (s1, gimple_omp_sections_clauses (stmt));
	  break;

	case GIMPLE_OMP_SINGLE:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_single
		   (s1, gimple_omp_single_clauses (stmt));
	  break;

	case GIMPLE_OMP_CRITICAL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy
	    = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
	{
	  /* Here we handle statements that are not completely rewritten.
	     First we detect some inlining-induced bogosities for
	     discarding.  */

	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = gimple_assign_lhs (stmt), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		return gimple_build_nop ();
	    }
	}

      if (gimple_debug_bind_p (stmt))
	{
	  copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
					  gimple_debug_bind_get_value (stmt),
					  stmt);
	  VEC_safe_push (gimple, heap, id->debug_stmts, copy);
	  return copy;
	}

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
	 RESX and EH_DISPATCH.  */
      if (id->eh_map)
	switch (gimple_code (copy))
	  {
	  case GIMPLE_CALL:
	    {
	      tree r, fndecl = gimple_call_fndecl (copy);
	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
		switch (DECL_FUNCTION_CODE (fndecl))
		  {
		  case BUILT_IN_EH_COPY_VALUES:
		    r = gimple_call_arg (copy, 1);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 1, r);
		    /* FALLTHRU */

		  case BUILT_IN_EH_POINTER:
		  case BUILT_IN_EH_FILTER:
		    r = gimple_call_arg (copy, 0);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 0, r);
		    break;

		  default:
		    break;
		  }

	      /* Reset alias info if we didn't apply measures to
		 keep it valid over inlining by setting DECL_PT_UID.  */
	      if (!id->src_cfun->gimple_df
		  || !id->src_cfun->gimple_df->ipa_pta)
		gimple_call_reset_alias_info (copy);
	    }
	    break;

	  case GIMPLE_RESX:
	    {
	      int r = gimple_resx_region (copy);
	      r = remap_eh_region_nr (r, id);
	      gimple_resx_set_region (copy, r);
	    }
	    break;

	  case GIMPLE_EH_DISPATCH:
	    {
	      int r = gimple_eh_dispatch_region (copy);
	      r = remap_eh_region_nr (r, id);
	      gimple_eh_dispatch_set_region (copy, r);
	    }
	    break;

	  default:
	    break;
	  }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  if (gimple_debug_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}
/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
	 gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;
  basic_block prev;

  /* Search for previous copied basic block.  */
  prev = bb->prev_bb;
  while (!prev->aux)
    prev = prev->prev_bb;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
					 (basic_block) prev->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     are of only minor importance; they drive canonicalize_loop_headers.  */
  freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
	continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
	 non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == NOP_EXPR
	  && !is_gimple_val (gimple_assign_rhs1 (stmt)))
	{
	  tree new_rhs;
	  new_rhs = force_gimple_operand_gsi (&seq_gsi,
					      gimple_assign_rhs1 (stmt),
					      true, NULL, false,
					      GSI_CONTINUE_LINKING);
	  gimple_assign_set_rhs1 (stmt, new_rhs);
	  id->regimplify = false;
	}

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
	gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block has been empty at the start of this iteration,
	 call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
	copy_gsi = gsi_start_bb (copy_basic_block);
      else
	gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
	 possibly turned the statement into multiple statements, so we
	 need to process all of them.  */
      do
	{
	  tree fn;

	  stmt = gsi_stmt (copy_gsi);
	  if (is_gimple_call (stmt)
	      && gimple_call_va_arg_pack_p (stmt)
	      && id->gimple_call)
	    {
	      /* __builtin_va_arg_pack () should be replaced by
		 all arguments corresponding to ... in the caller.  */
	      tree p;
	      gimple new_call;
	      VEC(tree, heap) *argarray;
	      size_t nargs = gimple_call_num_args (id->gimple_call);
	      size_t n;

	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
		nargs--;

	      /* Create the new array of arguments.  */
	      n = nargs + gimple_call_num_args (stmt);
	      argarray = VEC_alloc (tree, heap, n);
	      VEC_safe_grow (tree, heap, argarray, n);

	      /* Copy all the arguments before '...'  */
	      memcpy (VEC_address (tree, argarray),
		      gimple_call_arg_ptr (stmt, 0),
		      gimple_call_num_args (stmt) * sizeof (tree));

	      /* Append the arguments passed in '...'  */
	      memcpy (VEC_address (tree, argarray) + gimple_call_num_args (stmt),
		      gimple_call_arg_ptr (id->gimple_call, 0)
		        + (gimple_call_num_args (id->gimple_call) - nargs),
		      nargs * sizeof (tree));

	      new_call = gimple_build_call_vec (gimple_call_fn (stmt),
						argarray);

	      VEC_free (tree, heap, argarray);

	      /* Copy all GIMPLE_CALL flags, location and block, except
		 GF_CALL_VA_ARG_PACK.  */
	      gimple_call_copy_flags (new_call, stmt);
	      gimple_call_set_va_arg_pack (new_call, false);
	      gimple_set_location (new_call, gimple_location (stmt));
	      gimple_set_block (new_call, gimple_block (stmt));
	      gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

	      gsi_replace (&copy_gsi, new_call, false);
	      stmt = new_call;
	    }
	  else if (is_gimple_call (stmt)
		   && id->gimple_call
		   && (decl = gimple_call_fndecl (stmt))
		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
	    {
	      /* __builtin_va_arg_pack_len () should be replaced by
		 the number of anonymous arguments.  */
	      size_t nargs = gimple_call_num_args (id->gimple_call);
	      tree count, p;
	      gimple new_stmt;

	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
		nargs--;

	      count = build_int_cst (integer_type_node, nargs);
	      new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
	      gsi_replace (&copy_gsi, new_stmt, false);
	      stmt = new_stmt;
	    }

	  /* Statements produced by inlining can be unfolded, especially
	     when we constant propagated some operands.  We can't fold
	     them right now for two reasons:
	     1) folding requires SSA_NAME_DEF_STMTs to be correct
	     2) we can't change function calls to builtins.
	     So we just mark the statement for later folding.  We mark
	     all new statements, instead of just the statements that have
	     changed through some nontrivial substitution, so that even
	     statements made foldable indirectly are updated.  If this
	     turns out to be expensive, copy_body can be told to watch
	     for nontrivial changes.  */
	  if (id->statements_to_fold)
	    pointer_set_insert (id->statements_to_fold, stmt);

	  /* We're duplicating a CALL_EXPR.  Find any corresponding
	     callgraph edges and update or duplicate them.  */
	  if (is_gimple_call (stmt))
	    {
	      struct cgraph_edge *edge;
	      int flags;

	      switch (id->transform_call_graph_edges)
		{
		case CB_CGE_DUPLICATE:
		  edge = cgraph_edge (id->src_node, orig_stmt);
		  if (edge)
		    {
		      int edge_freq = edge->frequency;
		      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
						gimple_uid (stmt),
						REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
						true);
		      /* We could also just rescale the frequency, but
			 doing so would introduce roundoff errors and make
			 the verifier unhappy.  */
		      edge->frequency
			= compute_call_stmt_bb_frequency (id->dst_node->decl,
							  copy_basic_block);
		      if (dump_file
			  && profile_status_for_function (cfun) != PROFILE_ABSENT
			  && (edge_freq > edge->frequency + 10
			      || edge_freq < edge->frequency - 10))
			{
			  fprintf (dump_file, "Edge frequency estimated by "
				   "cgraph %i diverge from inliner's estimate %i\n",
				   edge_freq,
				   edge->frequency);
			  fprintf (dump_file,
				   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
				   bb->index,
				   bb->frequency,
				   copy_basic_block->frequency);
			}
		      stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
		    }
		  break;

		case CB_CGE_MOVE_CLONES:
		  cgraph_set_call_stmt_including_clones (id->dst_node,
							 orig_stmt, stmt);
		  edge = cgraph_edge (id->dst_node, stmt);
		  break;

		case CB_CGE_MOVE:
		  edge = cgraph_edge (id->dst_node, orig_stmt);
		  if (edge)
		    cgraph_set_call_stmt (edge, stmt);
		  break;

		default:
		  gcc_unreachable ();
		}

	      /* Constant propagation on arguments done during inlining
		 may create a new direct call.  Produce an edge for it.  */
	      if ((!edge
		   || (edge->indirect_inlining_edge
		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
		  && id->dst_node->analyzed
		  && (fn = gimple_call_fndecl (stmt)) != NULL)
		{
		  struct cgraph_node *dest = cgraph_get_node (fn);

		  /* We have a missing edge in the callgraph.  This can happen
		     when previous inlining turned an indirect call into a
		     direct call by constant propagating arguments or we are
		     producing a dead clone (for further cloning).  In all
		     other cases we hit a bug (incorrect node sharing is the
		     most common reason for missing edges).  */
		  gcc_assert (dest->needed || !dest->analyzed
			      || dest->address_taken
			      || !id->src_node->analyzed
			      || !id->dst_node->analyzed);
		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
		    cgraph_create_edge_including_clones
		      (id->dst_node, dest, orig_stmt, stmt, bb->count,
		       compute_call_stmt_bb_frequency (id->dst_node->decl,
						       copy_basic_block),
		       CIF_ORIGINALLY_INDIRECT_CALL);
		  else
		    cgraph_create_edge (id->dst_node, dest, stmt,
					bb->count,
					compute_call_stmt_bb_frequency
					  (id->dst_node->decl, copy_basic_block))->inline_failed
		      = CIF_ORIGINALLY_INDIRECT_CALL;
		  if (dump_file)
		    {
		      fprintf (dump_file, "Created new direct edge to %s\n",
			       cgraph_node_name (dest));
		    }
		}

	      flags = gimple_call_flags (stmt);
	      if (flags & ECF_MAY_BE_ALLOCA)
		cfun->calls_alloca = true;
	      if (flags & ECF_RETURNS_TWICE)
		cfun->calls_setjmp = true;
	    }

	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
				      id->eh_map, id->eh_lp_nr);

	  if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
	    {
	      ssa_op_iter i;
	      tree def;

	      find_new_referenced_vars (gsi_stmt (copy_gsi));
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
		if (TREE_CODE (def) == SSA_NAME)
		  SSA_NAME_DEF_STMT (def) = stmt;
	    }

	  gsi_next (&copy_gsi);
	}
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}
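
/* Illustrative sketch (editorial example, not part of the original
   source): counts are scaled in fixed point against REG_BR_PROB_BASE.
   Assuming REG_BR_PROB_BASE is 10000, if the call site executes half
   as often as the callee was profiled, count_scale is 5000, and a
   callee block with count 1000 copies to

       1000 * 5000 / 10000 = 500.

   The same scheme applies to frequency_scale above.  */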
/* Inserting a Single Entry Multiple Exit region in SSA form into code in
   SSA form is quite easy, since the dominator relationship for the old
   basic blocks does not change.

   There is, however, an exception: inlining might change the dominator
   relation across EH edges from basic blocks within inlined functions
   destined for landing pads in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges;
   we might then want to change the way we build the CFG pre-inlining
   to include all the possible edges.  */
static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
				  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
	gimple phi;
	gimple_stmt_iterator si;

	if (!nonlocal_goto)
	  gcc_assert (e->flags & EDGE_EH);

	if (!can_throw)
	  gcc_assert (!(e->flags & EDGE_EH));

	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
	  {
	    edge re;

	    phi = gsi_stmt (si);

	    /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
	    gcc_assert (!e->dest->aux);

	    gcc_assert ((e->flags & EDGE_EH)
			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

	    if (!is_gimple_reg (PHI_RESULT (phi)))
	      {
		mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
		continue;
	      }

	    re = find_edge (ret_bb, e->dest);
	    gcc_assert (re);
	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
	  }
      }
}
1866 /* Copy edges from BB into its copy constructed earlier, scale profile
1867 accordingly. Edges will be taken care of later. Assume the aux
1868 pointers point to the copies of each BB. Return true if any
1869 debug stmts are left after a statement that must end the basic block. */
1871 static bool
1872 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1874 basic_block new_bb = (basic_block) bb->aux;
1875 edge_iterator ei;
1876 edge old_edge;
1877 gimple_stmt_iterator si;
1878 int flags;
1879 bool need_debug_cleanup = false;
1881 /* Use the indices from the original blocks to create edges for the
1882 new ones. */
1883 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1884 if (!(old_edge->flags & EDGE_EH))
1886 edge new_edge;
1888 flags = old_edge->flags;
1890 /* Return edges do get a FALLTHRU flag when they get inlined. */
1891 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1892 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1893 flags |= EDGE_FALLTHRU;
1894 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1895 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1896 new_edge->probability = old_edge->probability;
1899 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1900 return false;
1902 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1904 gimple copy_stmt;
1905 bool can_throw, nonlocal_goto;
1907 copy_stmt = gsi_stmt (si);
1908 if (!is_gimple_debug (copy_stmt))
1910 update_stmt (copy_stmt);
1911 if (gimple_in_ssa_p (cfun))
1912 mark_symbols_for_renaming (copy_stmt);
1915 /* Do this before the possible split_block. */
1916 gsi_next (&si);
1918 /* If this tree could throw an exception, there are two
1919 cases where we need to add abnormal edge(s): the
1920 tree wasn't in a region and there is a "current
1921 region" in the caller; or the original tree had
1922 EH edges. In both cases split the block after the tree,
1923 and add abnormal edge(s) as needed; we need both
1924 those from the callee and the caller.
1925 We check whether the copy can throw, because the const
1926 propagation can change an INDIRECT_REF which throws
1927 into a COMPONENT_REF which doesn't. If the copy
1928 can throw, the original could also throw. */
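	 /* An illustrative sketch (not from the original sources): given

	      struct S { int x; } s;
	      int f (struct S *p) { return p->x; }

	    inlining f (&s) propagates &s into P, so the possibly-throwing
	    INDIRECT_REF load p->x becomes the non-throwing COMPONENT_REF
	    s.x, and the copy may lose EH edges the original had.  */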
1929 can_throw = stmt_can_throw_internal (copy_stmt);
1930 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1932 if (can_throw || nonlocal_goto)
1934 if (!gsi_end_p (si))
1936 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1937 gsi_next (&si);
1938 if (gsi_end_p (si))
1939 need_debug_cleanup = true;
1941 if (!gsi_end_p (si))
1942 /* Note that bb's predecessor edges aren't necessarily
1943 right at this point; split_block doesn't care. */
1945 edge e = split_block (new_bb, copy_stmt);
1947 new_bb = e->dest;
1948 new_bb->aux = e->src->aux;
1949 si = gsi_start_bb (new_bb);
1953 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1954 make_eh_dispatch_edges (copy_stmt);
1955 else if (can_throw)
1956 make_eh_edges (copy_stmt);
1958 if (nonlocal_goto)
1959 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1961 if ((can_throw || nonlocal_goto)
1962 && gimple_in_ssa_p (cfun))
1963 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1964 can_throw, nonlocal_goto);
1966 return need_debug_cleanup;
1969 /* Copy the PHIs. All blocks and edges are copied; some blocks
1970 were possibly split and new outgoing EH edges inserted.
1971 BB points to the block of the original function and AUX pointers link
1972 the original and newly copied blocks. */
1974 static void
1975 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1977 basic_block const new_bb = (basic_block) bb->aux;
1978 edge_iterator ei;
1979 gimple phi;
1980 gimple_stmt_iterator si;
1981 edge new_edge;
1982 bool inserted = false;
1984 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1986 tree res, new_res;
1987 gimple new_phi;
1989 phi = gsi_stmt (si);
1990 res = PHI_RESULT (phi);
1991 new_res = res;
1992 if (is_gimple_reg (res))
1994 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1995 SSA_NAME_DEF_STMT (new_res)
1996 = new_phi = create_phi_node (new_res, new_bb);
1997 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1999 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2000 tree arg;
2001 tree new_arg;
2002 tree block = id->block;
2003 edge_iterator ei2;
2005 /* When doing partial cloning, we allow PHIs on the entry block
2006 as long as all the arguments are the same. Find any input
2007 edge to see which argument to copy. */
2008 if (!old_edge)
2009 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2010 if (!old_edge->src->aux)
2011 break;
2013 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2014 new_arg = arg;
2015 id->block = NULL_TREE;
2016 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2017 id->block = block;
2018 gcc_assert (new_arg);
2019 /* With the return slot optimization we can end up with
2020 a non-gimple (foo *)&this->m; fix that here. */
2021 if (TREE_CODE (new_arg) != SSA_NAME
2022 && TREE_CODE (new_arg) != FUNCTION_DECL
2023 && !is_gimple_val (new_arg))
2025 gimple_seq stmts = NULL;
2026 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2027 gsi_insert_seq_on_edge (new_edge, stmts);
2028 inserted = true;
2030 add_phi_arg (new_phi, new_arg, new_edge,
2031 gimple_phi_arg_location_from_edge (phi, old_edge));
2036 /* Commit the delayed edge insertions. */
2037 if (inserted)
2038 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2039 gsi_commit_one_edge_insert (new_edge, NULL);
2043 /* Wrapper for remap_decl so it can be used as a callback. */
2045 static tree
2046 remap_decl_1 (tree decl, void *data)
2048 return remap_decl (decl, (copy_body_data *) data);
2051 /* Build the struct function and associated data structures for the new
2052 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
2054 static void
2055 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2057 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2058 gcov_type count_scale;
2060 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2061 count_scale = (REG_BR_PROB_BASE * count
2062 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2063 else
2064 count_scale = REG_BR_PROB_BASE;
2066 /* Register specific tree functions. */
2067 gimple_register_cfg_hooks ();
2069 /* Get clean struct function. */
2070 push_struct_function (new_fndecl);
2072 /* We will rebuild these, so just sanity check that they are empty. */
2073 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2074 gcc_assert (cfun->local_decls == NULL);
2075 gcc_assert (cfun->cfg == NULL);
2076 gcc_assert (cfun->decl == new_fndecl);
2078 /* Copy items we preserve during cloning. */
2079 cfun->static_chain_decl = src_cfun->static_chain_decl;
2080 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2081 cfun->function_end_locus = src_cfun->function_end_locus;
2082 cfun->curr_properties = src_cfun->curr_properties;
2083 cfun->last_verified = src_cfun->last_verified;
2084 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2085 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2086 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2087 cfun->stdarg = src_cfun->stdarg;
2088 cfun->after_inlining = src_cfun->after_inlining;
2089 cfun->can_throw_non_call_exceptions
2090 = src_cfun->can_throw_non_call_exceptions;
2091 cfun->returns_struct = src_cfun->returns_struct;
2092 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2093 cfun->after_tree_profile = src_cfun->after_tree_profile;
2095 init_empty_tree_cfg ();
2097 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2098 ENTRY_BLOCK_PTR->count =
2099 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2100 REG_BR_PROB_BASE);
2101 ENTRY_BLOCK_PTR->frequency
2102 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2103 EXIT_BLOCK_PTR->count =
2104 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2105 REG_BR_PROB_BASE);
2106 EXIT_BLOCK_PTR->frequency =
2107 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2108 if (src_cfun->eh)
2109 init_eh_for_function ();
2111 if (src_cfun->gimple_df)
2113 init_tree_ssa (cfun);
2114 cfun->gimple_df->in_ssa_p = true;
2115 init_ssa_operands ();
2117 pop_cfun ();
2120 /* Helper function for copy_cfg_body. Move debug stmts from the end
2121 of NEW_BB to the beginning of successor basic blocks when needed. If the
2122 successor has multiple predecessors, reset the debug stmts' values;
2123 otherwise keep them. */
2125 static void
2126 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2128 edge e;
2129 edge_iterator ei;
2130 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2132 if (gsi_end_p (si)
2133 || gsi_one_before_end_p (si)
2134 || !(stmt_can_throw_internal (gsi_stmt (si))
2135 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2136 return;
2138 FOR_EACH_EDGE (e, ei, new_bb->succs)
2140 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2141 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2142 while (is_gimple_debug (gsi_stmt (ssi)))
2144 gimple stmt = gsi_stmt (ssi), new_stmt;
2145 tree var;
2146 tree value;
2148 /* For the last edge move the debug stmts instead of copying
2149 them. */
2150 if (ei_one_before_end_p (ei))
2152 si = ssi;
2153 gsi_prev (&ssi);
2154 if (!single_pred_p (e->dest))
2155 gimple_debug_bind_reset_value (stmt);
2156 gsi_remove (&si, false);
2157 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2158 continue;
2161 var = gimple_debug_bind_get_var (stmt);
2162 if (single_pred_p (e->dest))
2164 value = gimple_debug_bind_get_value (stmt);
2165 value = unshare_expr (value);
2167 else
2168 value = NULL_TREE;
2169 new_stmt = gimple_build_debug_bind (var, value, stmt);
2170 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2171 VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
2172 gsi_prev (&ssi);
2177 /* Make a copy of the body of FN so that it can be inserted inline in
2178 another function. Walks FN via CFG, returns new fndecl. */
2180 static tree
2181 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2182 basic_block entry_block_map, basic_block exit_block_map,
2183 bitmap blocks_to_copy, basic_block new_entry)
2185 tree callee_fndecl = id->src_fn;
2186 /* Original cfun for the callee, doesn't change. */
2187 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2188 struct function *cfun_to_copy;
2189 basic_block bb;
2190 tree new_fndecl = NULL;
2191 bool need_debug_cleanup = false;
2192 gcov_type count_scale;
2193 int last;
2194 int incoming_frequency = 0;
2195 gcov_type incoming_count = 0;
2197 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2198 count_scale = (REG_BR_PROB_BASE * count
2199 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2200 else
2201 count_scale = REG_BR_PROB_BASE;
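  /* For instance (illustrative numbers): if the callee's entry block
     count is 1000 and this call site's COUNT is 250, COUNT_SCALE
     becomes REG_BR_PROB_BASE / 4, so every copied count below is
     scaled down to a quarter of its original value.  */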
2203 /* Register specific tree functions. */
2204 gimple_register_cfg_hooks ();
2206 /* If we are inlining just a region of the function, make sure to connect
2207 the new entry to ENTRY_BLOCK_PTR. Since the new entry can be part of a
2208 loop, we must compute the frequency and probability of ENTRY_BLOCK_PTR
2209 based on the frequencies and probabilities of edges incoming from the nonduplicated region. */
2210 if (new_entry)
2212 edge e;
2213 edge_iterator ei;
2215 FOR_EACH_EDGE (e, ei, new_entry->preds)
2216 if (!e->src->aux)
2218 incoming_frequency += EDGE_FREQUENCY (e);
2219 incoming_count += e->count;
2221 incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
2222 incoming_frequency
2223 = incoming_frequency * frequency_scale / REG_BR_PROB_BASE;
2224 ENTRY_BLOCK_PTR->count = incoming_count;
2225 ENTRY_BLOCK_PTR->frequency = incoming_frequency;
2228 /* Must have a CFG here at this point. */
2229 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2230 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2232 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2234 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2235 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2236 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2237 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2239 /* Duplicate any exception-handling regions. */
2240 if (cfun->eh)
2241 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2242 remap_decl_1, id);
2244 /* Use the aux pointers to map the original blocks to their copies. */
2245 FOR_EACH_BB_FN (bb, cfun_to_copy)
2246 if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
2248 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2249 bb->aux = new_bb;
2250 new_bb->aux = bb;
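      /* The two AUX assignments above form a bidirectional map, so the
	 edge and PHI copying below can go from an original block to its
	 copy and back without any extra lookup structure.  */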
2253 last = last_basic_block;
2255 /* Now that we've duplicated the blocks, duplicate their edges. */
2256 FOR_ALL_BB_FN (bb, cfun_to_copy)
2257 if (!blocks_to_copy
2258 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2259 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
2261 if (new_entry)
2263 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2264 e->probability = REG_BR_PROB_BASE;
2265 e->count = incoming_count;
2268 if (gimple_in_ssa_p (cfun))
2269 FOR_ALL_BB_FN (bb, cfun_to_copy)
2270 if (!blocks_to_copy
2271 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2272 copy_phis_for_bb (bb, id);
2274 FOR_ALL_BB_FN (bb, cfun_to_copy)
2275 if (bb->aux)
2277 if (need_debug_cleanup
2278 && bb->index != ENTRY_BLOCK
2279 && bb->index != EXIT_BLOCK)
2280 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2281 ((basic_block)bb->aux)->aux = NULL;
2282 bb->aux = NULL;
2285 /* Zero out the AUX fields of blocks newly created during EH edge
2286 insertion. */
2287 for (; last < last_basic_block; last++)
2289 if (need_debug_cleanup)
2290 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2291 BASIC_BLOCK (last)->aux = NULL;
2293 entry_block_map->aux = NULL;
2294 exit_block_map->aux = NULL;
2296 if (id->eh_map)
2298 pointer_map_destroy (id->eh_map);
2299 id->eh_map = NULL;
2302 return new_fndecl;
2305 /* Copy the debug STMT using ID. We deal with these statements in a
2306 special way: if any variable in their VALUE expression wasn't
2307 remapped yet, we won't remap it, because that would get decl uids
2308 out of sync, causing codegen differences between -g and -g0. If
2309 this arises, we drop the VALUE expression altogether. */
2311 static void
2312 copy_debug_stmt (gimple stmt, copy_body_data *id)
2314 tree t, *n;
2315 struct walk_stmt_info wi;
2317 t = id->block;
2318 if (gimple_block (stmt))
2320 tree *n;
2321 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2322 if (n)
2323 t = *n;
2325 gimple_set_block (stmt, t);
2327 /* Remap all the operands in COPY. */
2328 memset (&wi, 0, sizeof (wi));
2329 wi.info = id;
2331 processing_debug_stmt = 1;
2333 t = gimple_debug_bind_get_var (stmt);
2335 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2336 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2338 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2339 t = *n;
2341 else if (TREE_CODE (t) == VAR_DECL
2342 && !TREE_STATIC (t)
2343 && gimple_in_ssa_p (cfun)
2344 && !pointer_map_contains (id->decl_map, t)
2345 && !var_ann (t))
2346 /* T is a non-localized variable. */;
2347 else
2348 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2350 gimple_debug_bind_set_var (stmt, t);
2352 if (gimple_debug_bind_has_value_p (stmt))
2353 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2354 remap_gimple_op_r, &wi, NULL);
2356 /* Punt if any decl couldn't be remapped. */
2357 if (processing_debug_stmt < 0)
2358 gimple_debug_bind_reset_value (stmt);
2360 processing_debug_stmt = 0;
2362 update_stmt (stmt);
2363 if (gimple_in_ssa_p (cfun))
2364 mark_symbols_for_renaming (stmt);
2367 /* Process deferred debug stmts. In order to give values better odds
2368 of being successfully remapped, we delay the processing of debug
2369 stmts until all other stmts that might require remapping are
2370 processed. */
2372 static void
2373 copy_debug_stmts (copy_body_data *id)
2375 size_t i;
2376 gimple stmt;
2378 if (!id->debug_stmts)
2379 return;
2381 FOR_EACH_VEC_ELT (gimple, id->debug_stmts, i, stmt)
2382 copy_debug_stmt (stmt, id);
2384 VEC_free (gimple, heap, id->debug_stmts);
2387 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2388 another function. */
2390 static tree
2391 copy_tree_body (copy_body_data *id)
2393 tree fndecl = id->src_fn;
2394 tree body = DECL_SAVED_TREE (fndecl);
2396 walk_tree (&body, copy_tree_body_r, id, NULL);
2398 return body;
2401 /* Make a copy of the body of FN so that it can be inserted inline in
2402 another function. */
2404 static tree
2405 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2406 basic_block entry_block_map, basic_block exit_block_map,
2407 bitmap blocks_to_copy, basic_block new_entry)
2409 tree fndecl = id->src_fn;
2410 tree body;
2412 /* If this body has a CFG, walk CFG and copy. */
2413 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2414 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2415 blocks_to_copy, new_entry);
2416 copy_debug_stmts (id);
2418 return body;
2421 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2422 defined in function FN, or of a data member thereof. */
2424 static bool
2425 self_inlining_addr_expr (tree value, tree fn)
2427 tree var;
2429 if (TREE_CODE (value) != ADDR_EXPR)
2430 return false;
2432 var = get_base_address (TREE_OPERAND (value, 0));
2434 return var && auto_var_in_fn_p (var, fn);
2437 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2438 lexical block and line number information from base_stmt, if given,
2439 or from the last stmt of the block otherwise. */
2441 static gimple
2442 insert_init_debug_bind (copy_body_data *id,
2443 basic_block bb, tree var, tree value,
2444 gimple base_stmt)
2446 gimple note;
2447 gimple_stmt_iterator gsi;
2448 tree tracked_var;
2450 if (!gimple_in_ssa_p (id->src_cfun))
2451 return NULL;
2453 if (!MAY_HAVE_DEBUG_STMTS)
2454 return NULL;
2456 tracked_var = target_for_debug_bind (var);
2457 if (!tracked_var)
2458 return NULL;
2460 if (bb)
2462 gsi = gsi_last_bb (bb);
2463 if (!base_stmt && !gsi_end_p (gsi))
2464 base_stmt = gsi_stmt (gsi);
2467 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2469 if (bb)
2471 if (!gsi_end_p (gsi))
2472 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2473 else
2474 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2477 return note;
2480 static void
2481 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2483 /* If VAR represents a zero-sized variable, it's possible that the
2484 assignment statement may result in no gimple statements. */
2485 if (init_stmt)
2487 gimple_stmt_iterator si = gsi_last_bb (bb);
2489 /* We can end up with init statements that store to a non-register
2490 from a rhs with a conversion. Handle that here by forcing the
2491 rhs into a temporary. gimple_regimplify_operands is not
2492 prepared to do this for us. */
2493 if (!is_gimple_debug (init_stmt)
2494 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2495 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2496 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2498 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2499 gimple_expr_type (init_stmt),
2500 gimple_assign_rhs1 (init_stmt));
2501 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2502 GSI_NEW_STMT);
2503 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2504 gimple_assign_set_rhs1 (init_stmt, rhs);
2506 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2507 gimple_regimplify_operands (init_stmt, &si);
2508 mark_symbols_for_renaming (init_stmt);
2510 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2512 tree var, def = gimple_assign_lhs (init_stmt);
2514 if (TREE_CODE (def) == SSA_NAME)
2515 var = SSA_NAME_VAR (def);
2516 else
2517 var = def;
2519 insert_init_debug_bind (id, bb, var, def, init_stmt);
2524 /* Initialize parameter P with VALUE. If needed, produce an init statement
2525 at the end of BB. When BB is NULL, we return the init statement to be
2526 output later. */
2527 static gimple
2528 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2529 basic_block bb, tree *vars)
2531 gimple init_stmt = NULL;
2532 tree var;
2533 tree rhs = value;
2534 tree def = (gimple_in_ssa_p (cfun)
2535 ? gimple_default_def (id->src_cfun, p) : NULL);
2537 if (value
2538 && value != error_mark_node
2539 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2541 if (fold_convertible_p (TREE_TYPE (p), value))
2542 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2543 else
2544 /* ??? For valid (GIMPLE) programs we should not end up here.
2545 Still if something has gone wrong and we end up with truly
2546 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2547 to not leak invalid GIMPLE to the following passes. */
2548 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2551 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2552 here since the type of this decl must be visible to the calling
2553 function. */
2554 var = copy_decl_to_var (p, id);
2556 /* We're actually using the newly-created var. */
2557 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2558 add_referenced_var (var);
2560 /* Declare this new variable. */
2561 DECL_CHAIN (var) = *vars;
2562 *vars = var;
2564 /* Make gimplifier happy about this variable. */
2565 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2567 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2568 we would not need to create a new variable here at all, if it
2569 weren't for debug info. Still, we can just use the argument
2570 value. */
2571 if (TREE_READONLY (p)
2572 && !TREE_ADDRESSABLE (p)
2573 && value && !TREE_SIDE_EFFECTS (value)
2574 && !def)
2576 /* We may produce non-gimple trees by adding NOPs or introduce
2577 invalid sharing when the operand is not really constant.
2578 It is not a big deal to prohibit constant propagation here, as
2579 we will constant propagate in the DOM1 pass anyway. */
2580 if (is_gimple_min_invariant (value)
2581 && useless_type_conversion_p (TREE_TYPE (p),
2582 TREE_TYPE (value))
2583 /* We have to be very careful about ADDR_EXPR. Make sure
2584 the base variable isn't a local variable of the inlined
2585 function, e.g., when doing recursive inlining, direct or
2586 mutually-recursive or whatever, which is why we don't
2587 just test whether fn == current_function_decl. */
2588 && ! self_inlining_addr_expr (value, fn))
2590 insert_decl_map (id, p, value);
2591 insert_debug_decl_map (id, p, var);
2592 return insert_init_debug_bind (id, bb, var, value, NULL);
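      /* An illustrative sketch (not from the original sources) of the
	 ADDR_EXPR hazard checked above: in a recursive call such as

	   void f (int *p) { int x; ... f (&x); ... }

	 the argument &x names a local of the very function being inlined,
	 so it must not be propagated as-is into the inlined body, where X
	 stands for a different instance of the variable.  */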
2596 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2597 that way, when the PARM_DECL is encountered, it will be
2598 automatically replaced by the VAR_DECL. */
2599 insert_decl_map (id, p, var);
2601 /* Even if P was TREE_READONLY, the new VAR should not be.
2602 In the original code, we would have constructed a
2603 temporary, and then the function body would have never
2604 changed the value of P. However, now, we will be
2605 constructing VAR directly. The constructor body may
2606 change its value multiple times as it is being
2607 constructed. Therefore, it must not be TREE_READONLY;
2608 the back-end assumes that TREE_READONLY variable is
2609 assigned to only once. */
2610 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2611 TREE_READONLY (var) = 0;
2613 /* If there is no setup required and we are in SSA, take the easy route
2614 replacing all SSA names representing the function parameter by the
2615 SSA name passed to the function.
2617 We need to construct a map for the variable anyway, as it might be
2618 used in different SSA names when the parameter is set in the function.
2620 Do the replacement at -O0 for const arguments replaced by a constant.
2621 This is important for builtin_constant_p and other constructs requiring
2622 a constant argument to be visible in the inlined function body. */
2623 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2624 && (optimize
2625 || (TREE_READONLY (p)
2626 && is_gimple_min_invariant (rhs)))
2627 && (TREE_CODE (rhs) == SSA_NAME
2628 || is_gimple_min_invariant (rhs))
2629 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2631 insert_decl_map (id, def, rhs);
2632 return insert_init_debug_bind (id, bb, var, rhs, NULL);
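      /* E.g. (an illustrative sketch, not from the original sources): for

	   static inline int f (const int n)
	   { return __builtin_constant_p (n); }

	 a call f (42) maps N's default definition directly to 42 even at
	 -O0, so __builtin_constant_p still sees a literal constant in the
	 inlined body.  */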
2635 /* If the value of the argument is never used, don't bother
2636 initializing it. */
2637 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2639 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2640 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2643 /* Initialize this VAR_DECL from the equivalent argument. Convert
2644 the argument to the proper type in case it was promoted. */
2645 if (value)
2647 if (rhs == error_mark_node)
2649 insert_decl_map (id, p, var);
2650 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2653 STRIP_USELESS_TYPE_CONVERSION (rhs);
2655 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2656 keep our trees in gimple form. */
2657 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2659 def = remap_ssa_name (def, id);
2660 init_stmt = gimple_build_assign (def, rhs);
2661 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2662 set_default_def (var, NULL);
2664 else
2665 init_stmt = gimple_build_assign (var, rhs);
2667 if (bb && init_stmt)
2668 insert_init_stmt (id, bb, init_stmt);
2670 return init_stmt;
2673 /* Generate code to initialize the parameters of the function at the
2674 top of the stack in ID from the GIMPLE_CALL STMT. */
2676 static void
2677 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2678 tree fn, basic_block bb)
2680 tree parms;
2681 size_t i;
2682 tree p;
2683 tree vars = NULL_TREE;
2684 tree static_chain = gimple_call_chain (stmt);
2686 /* Figure out what the parameters are. */
2687 parms = DECL_ARGUMENTS (fn);
2689 /* Loop through the parameter declarations, replacing each with an
2690 equivalent VAR_DECL, appropriately initialized. */
2691 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2693 tree val;
2694 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2695 setup_one_parameter (id, p, val, fn, bb, &vars);
2697 /* After remapping parameters remap their types. This has to be done
2698 in a second loop over all parameters to appropriately remap
2699 variable sized arrays when the size is specified in a
2700 parameter following the array. */
2701 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2703 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2704 if (varp
2705 && TREE_CODE (*varp) == VAR_DECL)
2707 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
2708 ? gimple_default_def (id->src_cfun, p) : NULL);
2709 tree var = *varp;
2710 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
2711 /* Also remap the default definition if it was remapped
2712 to the default definition of the parameter replacement
2713 by the parameter setup. */
2714 if (def)
2716 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2717 if (defp
2718 && TREE_CODE (*defp) == SSA_NAME
2719 && SSA_NAME_VAR (*defp) == var)
2720 TREE_TYPE (*defp) = TREE_TYPE (var);
2725 /* Initialize the static chain. */
2726 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2727 gcc_assert (fn != current_function_decl);
2728 if (p)
2730 /* No static chain? Seems like a bug in tree-nested.c. */
2731 gcc_assert (static_chain);
2733 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2736 declare_inline_vars (id->block, vars);
2740 /* Declare a return variable to replace the RESULT_DECL for the
2741 function we are calling. An appropriate DECL_STMT is returned.
2742 The USE_STMT is filled to contain a use of the declaration to
2743 indicate the return value of the function.
2745 RETURN_SLOT, if non-null, is the place where the result should be
2746 stored. It is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2747 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2749 The return value is a (possibly null) value that holds the result
2750 as seen by the caller. */
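/* E.g. (illustrative): for "a = foo ()" MODIFY_DEST is A, while for
   an aggregate-returning call whose result can be constructed
   directly in the caller's object, as in "s = foo ()" with
   CALL_EXPR_RETURN_SLOT_OPT set, RETURN_SLOT is S instead.  */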
2752 static tree
2753 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2754 basic_block entry_bb)
2756 tree callee = id->src_fn;
2757 tree result = DECL_RESULT (callee);
2758 tree callee_type = TREE_TYPE (result);
2759 tree caller_type;
2760 tree var, use;
2762 /* Handle type-mismatches in the function declaration return type
2763 vs. the call expression. */
2764 if (modify_dest)
2765 caller_type = TREE_TYPE (modify_dest);
2766 else
2767 caller_type = TREE_TYPE (TREE_TYPE (callee));
2769 /* We don't need to do anything for functions that don't return
2770 anything. */
2771 if (!result || VOID_TYPE_P (callee_type))
2772 return NULL_TREE;
2774 /* If there was a return slot, then the return value is the
2775 dereferenced address of that object. */
2776 if (return_slot)
2778 /* The front end shouldn't have used both return_slot and
2779 a modify expression. */
2780 gcc_assert (!modify_dest);
2781 if (DECL_BY_REFERENCE (result))
2783 tree return_slot_addr = build_fold_addr_expr (return_slot);
2784 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2786 /* We are going to construct *&return_slot and we can't do that
2787 for variables believed to be not addressable.
2789 FIXME: This check can possibly match, because values returned
2790 via the return slot optimization are not believed to have their
2791 address taken by alias analysis. */
2792 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2793 var = return_slot_addr;
2795 else
2797 var = return_slot;
2798 gcc_assert (TREE_CODE (var) != SSA_NAME);
2799 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2801 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2802 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2803 && !DECL_GIMPLE_REG_P (result)
2804 && DECL_P (var))
2805 DECL_GIMPLE_REG_P (var) = 0;
2806 use = NULL;
2807 goto done;
2810 /* All types requiring non-trivial constructors should have been handled. */
2811 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2813 /* Attempt to avoid creating a new temporary variable. */
2814 if (modify_dest
2815 && TREE_CODE (modify_dest) != SSA_NAME)
2817 bool use_it = false;
2819 /* We can't use MODIFY_DEST if there's type promotion involved. */
2820 if (!useless_type_conversion_p (callee_type, caller_type))
2821 use_it = false;
2823 /* ??? If we're assigning to a variable sized type, then we must
2824 reuse the destination variable, because we've no good way to
2825 create variable sized temporaries at this point. */
2826 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2827 use_it = true;
2829 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2830 reuse it as the result of the call directly. Don't do this if
2831 it would promote MODIFY_DEST to addressable. */
2832 else if (TREE_ADDRESSABLE (result))
2833 use_it = false;
2834 else
2836 tree base_m = get_base_address (modify_dest);
2838 /* If the base isn't a decl, then it's a pointer, and we don't
2839 know where that's going to go. */
2840 if (!DECL_P (base_m))
2841 use_it = false;
2842 else if (is_global_var (base_m))
2843 use_it = false;
2844 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2845 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2846 && !DECL_GIMPLE_REG_P (result)
2847 && DECL_GIMPLE_REG_P (base_m))
2848 use_it = false;
2849 else if (!TREE_ADDRESSABLE (base_m))
2850 use_it = true;
2853 if (use_it)
2855 var = modify_dest;
2856 use = NULL;
2857 goto done;
2861 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2863 var = copy_result_decl_to_var (result, id);
2864 if (gimple_in_ssa_p (cfun))
2865 add_referenced_var (var);
2867 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2869 /* Do not have the rest of GCC warn about this variable as it should
2870 not be visible to the user. */
2871 TREE_NO_WARNING (var) = 1;
2873 declare_inline_vars (id->block, var);
2875 /* Build the use expr. If the return type of the function was
2876 promoted, convert it back to the expected type. */
2877 use = var;
2878 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2879 use = fold_convert (caller_type, var);
2881 STRIP_USELESS_TYPE_CONVERSION (use);
2883 if (DECL_BY_REFERENCE (result))
2885 TREE_ADDRESSABLE (var) = 1;
2886 var = build_fold_addr_expr (var);
2889 done:
2890 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2891 way, when the RESULT_DECL is encountered, it will be
2892 automatically replaced by the VAR_DECL.
2894 When returning by reference, ensure that RESULT_DECL remaps to
2895 gimple_val. */
2896 if (DECL_BY_REFERENCE (result)
2897 && !is_gimple_val (var))
2899 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
2900 if (gimple_in_ssa_p (id->src_cfun))
2901 add_referenced_var (temp);
2902 insert_decl_map (id, result, temp);
2903 /* When RESULT_DECL is in SSA form, we need to use its default_def
2904 SSA_NAME. */
2905 if (gimple_in_ssa_p (id->src_cfun) && gimple_default_def (id->src_cfun, result))
2906 temp = remap_ssa_name (gimple_default_def (id->src_cfun, result), id);
2907 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
2909 else
2910 insert_decl_map (id, result, var);
2912 /* Remember this so we can ignore it in remap_decls. */
2913 id->retvar = var;
2915 return use;
2918 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2919 to a local label. */
2921 static tree
2922 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2924 tree node = *nodep;
2925 tree fn = (tree) fnp;
2927 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2928 return node;
2930 if (TYPE_P (node))
2931 *walk_subtrees = 0;
2933 return NULL_TREE;
2936 /* Determine if the function can be copied. If so return NULL. If
2937 not, return a string describing the reason for failure. */
2939 static const char *
2940 copy_forbidden (struct function *fun, tree fndecl)
2942 const char *reason = fun->cannot_be_copied_reason;
2943 tree decl;
2944 unsigned ix;
2946 /* Only examine the function once. */
2947 if (fun->cannot_be_copied_set)
2948 return reason;
2950 /* We cannot copy a function that receives a non-local goto
2951 because we cannot remap the destination label used in the
2952 function that is performing the non-local goto. */
2953 /* ??? Actually, this should be possible, if we work at it.
2954 No doubt there's just a handful of places that simply
2955 assume it doesn't happen and don't substitute properly. */
2956 if (fun->has_nonlocal_label)
2958 reason = G_("function %q+F can never be copied "
2959 "because it receives a non-local goto");
2960 goto fail;
2963 FOR_EACH_LOCAL_DECL (fun, ix, decl)
2964 if (TREE_CODE (decl) == VAR_DECL
2965 && TREE_STATIC (decl)
2966 && !DECL_EXTERNAL (decl)
2967 && DECL_INITIAL (decl)
2968 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2969 has_label_address_in_static_1,
2970 fndecl))
2972 reason = G_("function %q+F can never be copied because it saves "
2973 "address of local label in a static variable");
2974 goto fail;
2977 fail:
2978 fun->cannot_be_copied_reason = reason;
2979 fun->cannot_be_copied_set = true;
2980 return reason;
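/* An illustrative sketch (not from the original sources) of the case
   rejected above:

     void f (void)
     {
       static void *p = &&lab;
      lab:
       if (!p) return;
     }

   any copy of F would leave P pointing at the original LAB.  */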
2984 static const char *inline_forbidden_reason;
2986 /* A callback for walk_gimple_seq to handle statements. Returns non-NULL
2987 iff a function cannot be inlined. Also sets the reason why. */
2989 static tree
2990 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2991 struct walk_stmt_info *wip)
2993 tree fn = (tree) wip->info;
2994 tree t;
2995 gimple stmt = gsi_stmt (*gsi);
2997 switch (gimple_code (stmt))
2999 case GIMPLE_CALL:
3000 /* Refuse to inline an alloca call unless the user explicitly forced it,
3001 as this may change the program's memory overhead drastically when the
3002 function using alloca is called in a loop. In the GCC present in
3003 SPEC2000, inlining into schedule_block caused it to require 2GB of
3004 RAM instead of 256MB. Don't do so for alloca calls emitted for
3005 VLA objects, as those can't cause unbounded growth (they're always
3006 wrapped inside stack_save/stack_restore regions). */
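    /* An illustrative sketch (not from the original sources): inlining

	 static void work (size_t n)
	 { char *buf = alloca (n); fill (buf, n); }

       into "for (i = 0; i < m; i++) work (n);" turns an allocation
       that was reclaimed on each return into M live allocations in
       the caller's frame, since inlined alloca space is only freed
       when the caller itself returns.  */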
3007 if (gimple_alloca_call_p (stmt)
3008 && !gimple_call_alloca_for_var_p (stmt)
3009 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3011 inline_forbidden_reason
3012 = G_("function %q+F can never be inlined because it uses "
3013 "alloca (override using the always_inline attribute)");
3014 *handled_ops_p = true;
3015 return fn;
3018 t = gimple_call_fndecl (stmt);
3019 if (t == NULL_TREE)
3020 break;
3022 /* We cannot inline functions that call setjmp. */
3023 if (setjmp_call_p (t))
3025 inline_forbidden_reason
3026 = G_("function %q+F can never be inlined because it uses setjmp");
3027 *handled_ops_p = true;
3028 return t;
3031 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3032 switch (DECL_FUNCTION_CODE (t))
3034 /* We cannot inline functions that take a variable number of
3035 arguments. */
3036 case BUILT_IN_VA_START:
3037 case BUILT_IN_NEXT_ARG:
3038 case BUILT_IN_VA_END:
3039 inline_forbidden_reason
3040 = G_("function %q+F can never be inlined because it "
3041 "uses variable argument lists");
3042 *handled_ops_p = true;
3043 return t;
3045 case BUILT_IN_LONGJMP:
3046 /* We can't inline functions that call __builtin_longjmp at
3047 all. The non-local goto machinery really requires the
3048 destination be in a different function. If we allow the
3049 function calling __builtin_longjmp to be inlined into the
3050 function calling __builtin_setjmp, Things will Go Awry. */
3051 inline_forbidden_reason
3052 = G_("function %q+F can never be inlined because "
3053 "it uses setjmp-longjmp exception handling");
3054 *handled_ops_p = true;
3055 return t;
3057 case BUILT_IN_NONLOCAL_GOTO:
3058 /* Similarly. */
3059 inline_forbidden_reason
3060 = G_("function %q+F can never be inlined because "
3061 "it uses non-local goto");
3062 *handled_ops_p = true;
3063 return t;
3065 case BUILT_IN_RETURN:
3066 case BUILT_IN_APPLY_ARGS:
3067 /* If a __builtin_apply_args caller were inlined,
3068 it would be saving arguments of the function it has
3069 been inlined into. Similarly, __builtin_return would
3070 return from the function it has been inlined into. */
3071 inline_forbidden_reason
3072 = G_("function %q+F can never be inlined because "
3073 "it uses __builtin_return or __builtin_apply_args");
3074 *handled_ops_p = true;
3075 return t;
3077 default:
3078 break;
3080 break;
3082 case GIMPLE_GOTO:
3083 t = gimple_goto_dest (stmt);
3085 /* We will not inline a function which uses computed goto. The
3086 addresses of its local labels, which may be tucked into
3087 global storage, are of course not constant across
3088 instantiations, which causes unexpected behavior. */
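      /* E.g. (illustrative): a dispatcher like

	   static void *tbl[] = { &&a, &&b };
	   goto *tbl[i];

	 stores label addresses that would not match the fresh labels
	 of an inlined copy of the function.  */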
3089 if (TREE_CODE (t) != LABEL_DECL)
3091 inline_forbidden_reason
3092 = G_("function %q+F can never be inlined "
3093 "because it contains a computed goto");
3094 *handled_ops_p = true;
3095 return t;
3097 break;
3099 default:
3100 break;
3103 *handled_ops_p = false;
3104 return NULL_TREE;
3107 /* Return true if FNDECL is a function that cannot be inlined into
3108 another one. */
3110 static bool
3111 inline_forbidden_p (tree fndecl)
3113 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3114 struct walk_stmt_info wi;
3115 struct pointer_set_t *visited_nodes;
3116 basic_block bb;
3117 bool forbidden_p = false;
3119 /* First check for shared reasons not to copy the code. */
3120 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3121 if (inline_forbidden_reason != NULL)
3122 return true;
3124 /* Next, walk the statements of the function looking for
3125 constructs we can't handle, or that are non-optimal for inlining. */
3126 visited_nodes = pointer_set_create ();
3127 memset (&wi, 0, sizeof (wi));
3128 wi.info = (void *) fndecl;
3129 wi.pset = visited_nodes;
3131 FOR_EACH_BB_FN (bb, fun)
3133 gimple ret;
3134 gimple_seq seq = bb_seq (bb);
3135 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3136 forbidden_p = (ret != NULL);
3137 if (forbidden_p)
3138 break;
3141 pointer_set_destroy (visited_nodes);
3142 return forbidden_p;
3145 /* Returns nonzero if FN is a function that does not have any
3146 fundamental inline blocking properties. */
3148 bool
3149 tree_inlinable_function_p (tree fn)
3151 bool inlinable = true;
3152 bool do_warning;
3153 tree always_inline;
3155 /* If we've already decided this function shouldn't be inlined,
3156 there's no need to check again. */
3157 if (DECL_UNINLINABLE (fn))
3158 return false;
3160 /* We only warn for functions declared `inline' by the user. */
3161 do_warning = (warn_inline
3162 && DECL_DECLARED_INLINE_P (fn)
3163 && !DECL_NO_INLINE_WARNING_P (fn)
3164 && !DECL_IN_SYSTEM_HEADER (fn));
3166 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3168 if (flag_no_inline
3169 && always_inline == NULL)
3171 if (do_warning)
3172 warning (OPT_Winline, "function %q+F can never be inlined because it "
3173 "is suppressed using -fno-inline", fn);
3174 inlinable = false;
3177 else if (!function_attribute_inlinable_p (fn))
3179 if (do_warning)
3180 warning (OPT_Winline, "function %q+F can never be inlined because it "
3181 "uses attributes conflicting with inlining", fn);
3182 inlinable = false;
3185 else if (inline_forbidden_p (fn))
3187 /* See if we should warn about uninlinable functions. Previously,
3188 some of these warnings would be issued while trying to expand
3189 the function inline, but that would cause multiple warnings
3190 about functions that would for example call alloca. But since
3191 this is a property of the function, just one warning is enough.
3192 As a bonus we can now give more details about the reason why a
3193 function is not inlinable. */
3194 if (always_inline)
3195 sorry (inline_forbidden_reason, fn);
3196 else if (do_warning)
3197 warning (OPT_Winline, inline_forbidden_reason, fn);
3199 inlinable = false;
3202 /* Squirrel away the result so that we don't have to check again. */
3203 DECL_UNINLINABLE (fn) = !inlinable;
3205 return inlinable;
3208 /* Estimate the cost of a memory move. Use the machine-dependent
3209 word size and take a possible memcpy call into account. */
3212 estimate_move_cost (tree type)
3214 HOST_WIDE_INT size;
3216 gcc_assert (!VOID_TYPE_P (type));
3218 if (TREE_CODE (type) == VECTOR_TYPE)
3220 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3221 enum machine_mode simd
3222 = targetm.vectorize.preferred_simd_mode (inner);
3223 int simd_mode_size = GET_MODE_SIZE (simd);
3224 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3225 / simd_mode_size);
3228 size = int_size_in_bytes (type);
3230 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3231 /* Cost of a memcpy call, 3 arguments and the call. */
3232 return 4;
3233 else
3234 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
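/* For example (with illustrative, target-dependent numbers): if
   MOVE_MAX_PIECES is 8, a 24-byte structure costs (24 + 8 - 1) / 8
   == 3, while a structure big enough to be copied by a memcpy call
   costs a flat 4 (three arguments plus the call itself).  */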
3237 /* Returns the cost of operation CODE, according to WEIGHTS. */
3239 static int
3240 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3241 tree op1 ATTRIBUTE_UNUSED, tree op2)
3243 switch (code)
3245 /* These are "free" conversions, or their presumed cost
3246 is folded into other operations. */
3247 case RANGE_EXPR:
3248 CASE_CONVERT:
3249 case COMPLEX_EXPR:
3250 case PAREN_EXPR:
3251 case VIEW_CONVERT_EXPR:
3252 return 0;
3254 /* Assign cost of 1 to usual operations.
3255 ??? We may consider mapping RTL costs to this. */
3256 case COND_EXPR:
3257 case VEC_COND_EXPR:
3259 case PLUS_EXPR:
3260 case POINTER_PLUS_EXPR:
3261 case MINUS_EXPR:
3262 case MULT_EXPR:
3263 case FMA_EXPR:
3265 case ADDR_SPACE_CONVERT_EXPR:
3266 case FIXED_CONVERT_EXPR:
3267 case FIX_TRUNC_EXPR:
3269 case NEGATE_EXPR:
3270 case FLOAT_EXPR:
3271 case MIN_EXPR:
3272 case MAX_EXPR:
3273 case ABS_EXPR:
3275 case LSHIFT_EXPR:
3276 case RSHIFT_EXPR:
3277 case LROTATE_EXPR:
3278 case RROTATE_EXPR:
3279 case VEC_LSHIFT_EXPR:
3280 case VEC_RSHIFT_EXPR:
3282 case BIT_IOR_EXPR:
3283 case BIT_XOR_EXPR:
3284 case BIT_AND_EXPR:
3285 case BIT_NOT_EXPR:
3287 case TRUTH_ANDIF_EXPR:
3288 case TRUTH_ORIF_EXPR:
3289 case TRUTH_AND_EXPR:
3290 case TRUTH_OR_EXPR:
3291 case TRUTH_XOR_EXPR:
3292 case TRUTH_NOT_EXPR:
3294 case LT_EXPR:
3295 case LE_EXPR:
3296 case GT_EXPR:
3297 case GE_EXPR:
3298 case EQ_EXPR:
3299 case NE_EXPR:
3300 case ORDERED_EXPR:
3301 case UNORDERED_EXPR:
3303 case UNLT_EXPR:
3304 case UNLE_EXPR:
3305 case UNGT_EXPR:
3306 case UNGE_EXPR:
3307 case UNEQ_EXPR:
3308 case LTGT_EXPR:
3310 case CONJ_EXPR:
3312 case PREDECREMENT_EXPR:
3313 case PREINCREMENT_EXPR:
3314 case POSTDECREMENT_EXPR:
3315 case POSTINCREMENT_EXPR:
3317 case REALIGN_LOAD_EXPR:
3319 case REDUC_MAX_EXPR:
3320 case REDUC_MIN_EXPR:
3321 case REDUC_PLUS_EXPR:
3322 case WIDEN_SUM_EXPR:
3323 case WIDEN_MULT_EXPR:
3324 case DOT_PROD_EXPR:
3325 case WIDEN_MULT_PLUS_EXPR:
3326 case WIDEN_MULT_MINUS_EXPR:
3328 case VEC_WIDEN_MULT_HI_EXPR:
3329 case VEC_WIDEN_MULT_LO_EXPR:
3330 case VEC_UNPACK_HI_EXPR:
3331 case VEC_UNPACK_LO_EXPR:
3332 case VEC_UNPACK_FLOAT_HI_EXPR:
3333 case VEC_UNPACK_FLOAT_LO_EXPR:
3334 case VEC_PACK_TRUNC_EXPR:
3335 case VEC_PACK_SAT_EXPR:
3336 case VEC_PACK_FIX_TRUNC_EXPR:
3337 case VEC_EXTRACT_EVEN_EXPR:
3338 case VEC_EXTRACT_ODD_EXPR:
3339 case VEC_INTERLEAVE_HIGH_EXPR:
3340 case VEC_INTERLEAVE_LOW_EXPR:
3342 return 1;
3344 /* A few special cases of expensive operations. This is useful
3345 to avoid inlining functions having too many of these. */
3346 case TRUNC_DIV_EXPR:
3347 case CEIL_DIV_EXPR:
3348 case FLOOR_DIV_EXPR:
3349 case ROUND_DIV_EXPR:
3350 case EXACT_DIV_EXPR:
3351 case TRUNC_MOD_EXPR:
3352 case CEIL_MOD_EXPR:
3353 case FLOOR_MOD_EXPR:
3354 case ROUND_MOD_EXPR:
3355 case RDIV_EXPR:
3356 if (TREE_CODE (op2) != INTEGER_CST)
3357 return weights->div_mod_cost;
3358 return 1;
3360 default:
3361 /* We expect a copy assignment with no operator. */
3362 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3363 return 0;
3368 /* Estimate number of instructions that will be created by expanding
3369 the statements in the statement sequence STMTS.
3370 WEIGHTS contains weights attributed to various constructs. */
3372 static
3373 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3375 int cost;
3376 gimple_stmt_iterator gsi;
3378 cost = 0;
3379 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3380 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3382 return cost;
3386 /* Estimate number of instructions that will be created by expanding STMT.
3387 WEIGHTS contains weights attributed to various constructs. */
3390 estimate_num_insns (gimple stmt, eni_weights *weights)
3392 unsigned cost, i;
3393 enum gimple_code code = gimple_code (stmt);
3394 tree lhs;
3395 tree rhs;
3397 switch (code)
3399 case GIMPLE_ASSIGN:
3400 /* Try to estimate the cost of assignments. We have two cases to
3401 deal with:
3402 1) Simple assignments to registers;
3403 2) Stores to things that must live in memory. This includes
3404 "normal" stores to scalars, but also assignments of large
3405 structures, or constructors of big arrays;
3407 Let us look at both cases, assuming we have "a = b + C":
3408 <GIMPLE_ASSIGN <var_decl "a">
3409 <plus_expr <var_decl "b"> <constant C>>
3410 If "a" is a GIMPLE register, the assignment to it is free on almost
3411 any target, because "a" usually ends up in a real register. Hence
3412 the only cost of this expression comes from the PLUS_EXPR, and we
3413 can ignore the GIMPLE_ASSIGN.
3414 If "a" is not a GIMPLE register, the assignment to "a" will most
3415 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3416 of moving something into "a", which we compute using the function
3417 estimate_move_cost. */
3418 lhs = gimple_assign_lhs (stmt);
3419 rhs = gimple_assign_rhs1 (stmt);
3421 if (is_gimple_reg (lhs))
3422 cost = 0;
3423 else
3424 cost = estimate_move_cost (TREE_TYPE (lhs));
3426 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3427 cost += estimate_move_cost (TREE_TYPE (rhs));
3429 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3430 gimple_assign_rhs1 (stmt),
3431 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3432 == GIMPLE_BINARY_RHS
3433 ? gimple_assign_rhs2 (stmt) : NULL);
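      /* E.g. (illustrative): "reg = reg2 + 1" costs just 1 for the
	 PLUS_EXPR, while "mem = x / y" with a non-constant Y costs
	 estimate_move_cost for the store plus div_mod_cost for the
	 division.  */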
3434 break;
3436 case GIMPLE_COND:
3437 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3438 gimple_op (stmt, 0),
3439 gimple_op (stmt, 1));
3440 break;
3442 case GIMPLE_SWITCH:
3443 /* Take into account the cost of the switch + guess 2 conditional jumps
3444 for each case label.
3446 TODO: once the switch expansion logic is sufficiently separated, we can
3447 do a better job of estimating the cost of the switch. */
3448 if (weights->time_based)
3449 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3450 else
3451 cost = gimple_switch_num_labels (stmt) * 2;
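      /* E.g. (illustrative): a switch with 8 case labels counts as
	 8 * 2 == 16 for size, but only floor_log2 (8) * 2 == 6 for
	 time, approximating a balanced decision tree of conditional
	 jumps.  */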
3452 break;
3454 case GIMPLE_CALL:
3456 tree decl = gimple_call_fndecl (stmt);
3457 struct cgraph_node *node;
3459 /* Do not special-case builtins where we see the body.
3460 This just confuses the inliner. */
3461 if (!decl || !(node = cgraph_get_node (decl)) || node->analyzed)
3463 /* For builtins that are likely expanded to nothing or
3464 inlined, do not account operand costs. */
3465 else if (is_simple_builtin (decl))
3466 return 0;
3467 else if (is_inexpensive_builtin (decl))
3468 return weights->target_builtin_call_cost;
3469 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3471 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3472 specialize the cheap expansion we do here.
3473 ??? This asks for a more general solution. */
3474 switch (DECL_FUNCTION_CODE (decl))
3476 case BUILT_IN_POW:
3477 case BUILT_IN_POWF:
3478 case BUILT_IN_POWL:
3479 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3480 && REAL_VALUES_EQUAL
3481 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3482 return estimate_operator_cost (MULT_EXPR, weights,
3483 gimple_call_arg (stmt, 0),
3484 gimple_call_arg (stmt, 0));
3485 break;
3487 default:
3488 break;
3492 cost = weights->call_cost;
3493 if (gimple_call_lhs (stmt))
3494 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3495 for (i = 0; i < gimple_call_num_args (stmt); i++)
3497 tree arg = gimple_call_arg (stmt, i);
3498 cost += estimate_move_cost (TREE_TYPE (arg));
3500 break;
3503 case GIMPLE_RETURN:
3504 return weights->return_cost;
3506 case GIMPLE_GOTO:
3507 case GIMPLE_LABEL:
3508 case GIMPLE_NOP:
3509 case GIMPLE_PHI:
3510 case GIMPLE_PREDICT:
3511 case GIMPLE_DEBUG:
3512 return 0;
3514 case GIMPLE_ASM:
3515 return asm_str_count (gimple_asm_string (stmt));
3517 case GIMPLE_RESX:
3518 /* This is either going to be an external function call with one
3519 argument, or two register copy statements plus a goto. */
3520 return 2;
3522 case GIMPLE_EH_DISPATCH:
3523 /* ??? This is going to turn into a switch statement. Ideally
3524 we'd have a look at the eh region and estimate the number of
3525 edges involved. */
3526 return 10;
3528 case GIMPLE_BIND:
3529 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3531 case GIMPLE_EH_FILTER:
3532 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3534 case GIMPLE_CATCH:
3535 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3537 case GIMPLE_TRY:
3538 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3539 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3541 /* OpenMP directives are generally very expensive. */
3543 case GIMPLE_OMP_RETURN:
3544 case GIMPLE_OMP_SECTIONS_SWITCH:
3545 case GIMPLE_OMP_ATOMIC_STORE:
3546 case GIMPLE_OMP_CONTINUE:
3547 /* ...except these, which are cheap. */
3548 return 0;
3550 case GIMPLE_OMP_ATOMIC_LOAD:
3551 return weights->omp_cost;
3553 case GIMPLE_OMP_FOR:
3554 return (weights->omp_cost
3555 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3556 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3558 case GIMPLE_OMP_PARALLEL:
3559 case GIMPLE_OMP_TASK:
3560 case GIMPLE_OMP_CRITICAL:
3561 case GIMPLE_OMP_MASTER:
3562 case GIMPLE_OMP_ORDERED:
3563 case GIMPLE_OMP_SECTION:
3564 case GIMPLE_OMP_SECTIONS:
3565 case GIMPLE_OMP_SINGLE:
3566 return (weights->omp_cost
3567 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3569 default:
3570 gcc_unreachable ();
3573 return cost;
3576 /* Estimate number of instructions that will be created by expanding
3577 function FNDECL. WEIGHTS contains weights attributed to various
3578 constructs. */
3581 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3583 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3584 gimple_stmt_iterator bsi;
3585 basic_block bb;
3586 int n = 0;
3588 gcc_assert (my_function && my_function->cfg);
3589 FOR_EACH_BB_FN (bb, my_function)
3591 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3592 n += estimate_num_insns (gsi_stmt (bsi), weights);
3595 return n;
3599 /* Initializes weights used by estimate_num_insns. */
3601 void
3602 init_inline_once (void)
3604 eni_size_weights.call_cost = 1;
3605 eni_size_weights.target_builtin_call_cost = 1;
3606 eni_size_weights.div_mod_cost = 1;
3607 eni_size_weights.omp_cost = 40;
3608 eni_size_weights.time_based = false;
3609 eni_size_weights.return_cost = 1;
3611 /* Estimating time for call is difficult, since we have no idea what the
3612 called function does. In the current uses of eni_time_weights,
3613 underestimating the cost does less harm than overestimating it, so
3614 we choose a rather small value here. */
3615 eni_time_weights.call_cost = 10;
3616 eni_time_weights.target_builtin_call_cost = 1;
3617 eni_time_weights.div_mod_cost = 10;
3618 eni_time_weights.omp_cost = 40;
3619 eni_time_weights.time_based = true;
3620 eni_time_weights.return_cost = 2;
3623 /* Estimate the number of instructions in a gimple_seq. */
3626 count_insns_seq (gimple_seq seq, eni_weights *weights)
3628 gimple_stmt_iterator gsi;
3629 int n = 0;
3630 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3631 n += estimate_num_insns (gsi_stmt (gsi), weights);
3633 return n;
3637 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3639 static void
3640 prepend_lexical_block (tree current_block, tree new_block)
3642 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3643 BLOCK_SUBBLOCKS (current_block) = new_block;
3644 BLOCK_SUPERCONTEXT (new_block) = current_block;
3647 /* Add local variables from CALLEE to CALLER. */
3649 static inline void
3650 add_local_variables (struct function *callee, struct function *caller,
3651 copy_body_data *id, bool check_var_ann)
3653 tree var;
3654 unsigned ix;
3656 FOR_EACH_LOCAL_DECL (callee, ix, var)
3657 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3659 if (!check_var_ann
3660 || (var_ann (var) && add_referenced_var (var)))
3661 add_local_decl (caller, var);
3663 else if (!can_be_nonlocal (var, id))
3665 tree new_var = remap_decl (var, id);
3667 /* Remap debug-expressions. */
3668 if (TREE_CODE (new_var) == VAR_DECL
3669 && DECL_DEBUG_EXPR_IS_FROM (new_var)
3670 && new_var != var)
3672 tree tem = DECL_DEBUG_EXPR (var);
3673 bool old_regimplify = id->regimplify;
3674 id->remapping_type_depth++;
3675 walk_tree (&tem, copy_tree_body_r, id, NULL);
3676 id->remapping_type_depth--;
3677 id->regimplify = old_regimplify;
3678 SET_DECL_DEBUG_EXPR (new_var, tem);
3680 add_local_decl (caller, new_var);
3684 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3686 static bool
3687 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3689 tree use_retvar;
3690 tree fn;
3691 struct pointer_map_t *st, *dst;
3692 tree return_slot;
3693 tree modify_dest;
3694 location_t saved_location;
3695 struct cgraph_edge *cg_edge;
3696 cgraph_inline_failed_t reason;
3697 basic_block return_block;
3698 edge e;
3699 gimple_stmt_iterator gsi, stmt_gsi;
3700 bool successfully_inlined = FALSE;
3701 bool purge_dead_abnormal_edges;
3703 /* Set input_location here so we get the right instantiation context
3704 if we call instantiate_decl from inlinable_function_p. */
3705 saved_location = input_location;
3706 if (gimple_has_location (stmt))
3707 input_location = gimple_location (stmt);
3709 /* From here on, we're only interested in CALL_EXPRs. */
3710 if (gimple_code (stmt) != GIMPLE_CALL)
3711 goto egress;
3713 cg_edge = cgraph_edge (id->dst_node, stmt);
3714 gcc_checking_assert (cg_edge);
3715 /* First, see if we can figure out what function is being called.
3716 If we cannot, then there is no hope of inlining the function. */
3717 if (cg_edge->indirect_unknown_callee)
3718 goto egress;
3719 fn = cg_edge->callee->decl;
3720 gcc_checking_assert (fn);
3722 /* If FN is a declaration of a function in a nested scope that was
3723 globally declared inline, we don't set its DECL_INITIAL.
3724 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3725 C++ front-end uses it for cdtors to refer to their internal
3726 declarations, which are not real functions. Fortunately those
3727 don't have trees to be saved, so we can tell by checking their
3728 gimple_body. */
3729 if (!DECL_INITIAL (fn)
3730 && DECL_ABSTRACT_ORIGIN (fn)
3731 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3732 fn = DECL_ABSTRACT_ORIGIN (fn);
3734 /* Don't try to inline functions that are not well-suited to inlining. */
3735 if (!cgraph_inline_p (cg_edge, &reason))
3737 /* If this call was originally indirect, we do not want to emit any
3738 inlining related warnings or sorry messages because there are no
3739 guarantees regarding those. */
3740 if (cg_edge->indirect_inlining_edge)
3741 goto egress;
3743 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3744 /* Avoid warnings during early inline pass. */
3745 && cgraph_global_info_ready)
3747 sorry ("inlining failed in call to %q+F: %s", fn,
3748 _(cgraph_inline_failed_string (reason)));
3749 sorry ("called from here");
3751 else if (warn_inline
3752 && DECL_DECLARED_INLINE_P (fn)
3753 && !DECL_NO_INLINE_WARNING_P (fn)
3754 && !DECL_IN_SYSTEM_HEADER (fn)
3755 && reason != CIF_UNSPECIFIED
3756 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3757 /* Do not warn about recursive calls that were not inlined. */
3758 && !cgraph_edge_recursive_p (cg_edge)
3759 /* Avoid warnings during early inline pass. */
3760 && cgraph_global_info_ready)
3762 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3763 fn, _(cgraph_inline_failed_string (reason)));
3764 warning (OPT_Winline, "called from here");
3766 goto egress;
3768 fn = cg_edge->callee->decl;
3770 #ifdef ENABLE_CHECKING
3771 if (cg_edge->callee->decl != id->dst_node->decl)
3772 verify_cgraph_node (cg_edge->callee);
3773 #endif
3775 /* We will be inlining this callee. */
3776 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3778 /* Update the caller's EH personality. */
3779 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3780 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3781 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3783 /* Split the block holding the GIMPLE_CALL. */
3784 e = split_block (bb, stmt);
3785 bb = e->src;
3786 return_block = e->dest;
3787 remove_edge (e);
3789 /* split_block splits after the statement; work around this by
3790 moving the call into the second block manually. Not pretty,
3791 but seems easier than doing the CFG manipulation by hand
3792 when the GIMPLE_CALL is the last statement of BB. */
3793 stmt_gsi = gsi_last_bb (bb);
3794 gsi_remove (&stmt_gsi, false);
3796 /* If the GIMPLE_CALL was the last statement of BB, it may have
3797 been the source of abnormal edges. In this case, schedule
3798 the removal of dead abnormal edges. */
3799 gsi = gsi_start_bb (return_block);
3800 if (gsi_end_p (gsi))
3802 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3803 purge_dead_abnormal_edges = true;
3805 else
3807 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3808 purge_dead_abnormal_edges = false;
3811 stmt_gsi = gsi_start_bb (return_block);
3813 /* Build a block containing code to initialize the arguments, the
3814 actual inline expansion of the body, and a label for the return
3815 statements within the function to jump to. The type of the
3816 statement expression is the return type of the function call. */
3817 id->block = make_node (BLOCK);
3818 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3819 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3820 prepend_lexical_block (gimple_block (stmt), id->block);
3822 /* Local declarations will be replaced by their equivalents in this
3823 map. */
3824 st = id->decl_map;
3825 id->decl_map = pointer_map_create ();
3826 dst = id->debug_map;
3827 id->debug_map = NULL;
3829 /* Record the function we are about to inline. */
3830 id->src_fn = fn;
3831 id->src_node = cg_edge->callee;
3832 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3833 id->gimple_call = stmt;
3835 gcc_assert (!id->src_cfun->after_inlining);
3837 id->entry_bb = bb;
3838 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3840 gimple_stmt_iterator si = gsi_last_bb (bb);
3841 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3842 NOT_TAKEN),
3843 GSI_NEW_STMT);
3845 initialize_inlined_parameters (id, stmt, fn, bb);
3847 if (DECL_INITIAL (fn))
3848 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3850 /* Return statements in the function body will be replaced by jumps
3851 to the RETURN_BLOCK. */
3852 gcc_assert (DECL_INITIAL (fn));
3853 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3855 /* Find the LHS to which the result of this call is assigned. */
3856 return_slot = NULL;
3857 if (gimple_call_lhs (stmt))
3859 modify_dest = gimple_call_lhs (stmt);
3861 /* The function which we are inlining might not return a value,
3862 in which case we should issue a warning that the function
3863 does not return a value. In that case the optimizers will
3864 see that the variable to which the value is assigned was not
3865 initialized. We do not want to issue a warning about that
3866 uninitialized variable. */
3867 if (DECL_P (modify_dest))
3868 TREE_NO_WARNING (modify_dest) = 1;
3870 if (gimple_call_return_slot_opt_p (stmt))
3872 return_slot = modify_dest;
3873 modify_dest = NULL;
3876 else
3877 modify_dest = NULL;
3879 /* If we are inlining a call to the C++ operator new, we don't want
3880 to use type based alias analysis on the return value. Otherwise
3881 we may get confused if the compiler sees that the inlined new
3882 function returns a pointer which was just deleted. See bug
3883 33407. */
3884 if (DECL_IS_OPERATOR_NEW (fn))
3886 return_slot = NULL;
3887 modify_dest = NULL;
3890 /* Declare the return variable for the function. */
3891 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
3893 /* Add local vars in this inlined callee to caller. */
3894 add_local_variables (id->src_cfun, cfun, id, true);
3896 if (dump_file && (dump_flags & TDF_DETAILS))
3898 fprintf (dump_file, "Inlining ");
3899 print_generic_expr (dump_file, id->src_fn, 0);
3900 fprintf (dump_file, " to ");
3901 print_generic_expr (dump_file, id->dst_fn, 0);
3902 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3905 /* This is it. Duplicate the callee body. Assume callee is
3906 pre-gimplified. Note that we must not alter the caller
3907 function in any way before this point, as this CALL_EXPR may be
3908 a self-referential call; if we're calling ourselves, we need to
3909 duplicate our body before altering anything. */
3910 copy_body (id, bb->count,
3911 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3912 bb, return_block, NULL, NULL);
3914 /* Reset the escaped solution. */
3915 if (cfun->gimple_df)
3916 pt_solution_reset (&cfun->gimple_df->escaped);
3918 /* Clean up. */
3919 if (id->debug_map)
3921 pointer_map_destroy (id->debug_map);
3922 id->debug_map = dst;
3924 pointer_map_destroy (id->decl_map);
3925 id->decl_map = st;
3927 /* Unlink the call's virtual operands before replacing it. */
3928 unlink_stmt_vdef (stmt);
3930 /* If the inlined function returns a result that we care about,
3931 substitute the GIMPLE_CALL with an assignment of the return
3932 variable to the LHS of the call. That is, if STMT was
3933 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3934 if (use_retvar && gimple_call_lhs (stmt))
3936 gimple old_stmt = stmt;
3937 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3938 gsi_replace (&stmt_gsi, stmt, false);
3939 if (gimple_in_ssa_p (cfun))
3940 mark_symbols_for_renaming (stmt);
3941 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3943 else
3945 /* Handle the case of inlining a function with no return
3946 statement, which causes the return value to become undefined. */
3947 if (gimple_call_lhs (stmt)
3948 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3950 tree name = gimple_call_lhs (stmt);
3951 tree var = SSA_NAME_VAR (name);
3952 tree def = gimple_default_def (cfun, var);
3954 if (def)
3956 /* If the variable is used undefined, make this name
3957 undefined via a move. */
3958 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3959 gsi_replace (&stmt_gsi, stmt, true);
3961 else
3963 /* Otherwise make this variable undefined. */
3964 gsi_remove (&stmt_gsi, true);
3965 set_default_def (var, name);
3966 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3969 else
3970 gsi_remove (&stmt_gsi, true);
3973 if (purge_dead_abnormal_edges)
3975 gimple_purge_dead_eh_edges (return_block);
3976 gimple_purge_dead_abnormal_call_edges (return_block);
3979 /* If the value of the new expression is ignored, that's OK. We
3980 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3981 the equivalent inlined version either. */
3982 if (is_gimple_assign (stmt))
3984 gcc_assert (gimple_assign_single_p (stmt)
3985 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
3986 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3989 /* Output the inlining info for this abstract function, since it has been
3990 inlined. If we don't do this now, we can lose the information about the
3991 variables in the function when the blocks get blown away as soon as we
3992 remove the cgraph node. */
3993 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
3995 /* Update callgraph if needed. */
3996 cgraph_remove_node (cg_edge->callee);
3998 id->block = NULL_TREE;
3999 successfully_inlined = TRUE;
4001 egress:
4002 input_location = saved_location;
4003 return successfully_inlined;
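/* Schematic example (illustrative): for a statement

     a = foo (x);

   the steps above split the block, initialize copies of foo's
   parameters from the arguments, duplicate foo's body with returns
   rewritten to assign the return variable, and finally replace the
   call with

     a = use_retvar;

   at the head of RETURN_BLOCK.  */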
4006 /* Expand call statements reachable from STMT_P.
4007 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4008 in a MODIFY_EXPR. See gimple.c:get_call_expr_in(). Unfortunately
4009 we cannot use that function here because we need a pointer
4010 to the CALL_EXPR, not the tree itself. */
4012 static bool
4013 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4015 gimple_stmt_iterator gsi;
4017 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4019 gimple stmt = gsi_stmt (gsi);
4021 if (is_gimple_call (stmt)
4022 && expand_call_inline (bb, stmt, id))
4023 return true;
4026 return false;
4030 /* Walk all basic blocks created after FIRST and try to fold every statement
4031 in the STATEMENTS pointer set. */
4033 static void
4034 fold_marked_statements (int first, struct pointer_set_t *statements)
4036 for (; first < n_basic_blocks; first++)
4037 if (BASIC_BLOCK (first))
4039 gimple_stmt_iterator gsi;
4041 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4042 !gsi_end_p (gsi);
4043 gsi_next (&gsi))
4044 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4046 gimple old_stmt = gsi_stmt (gsi);
4047 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4049 if (old_decl && DECL_BUILT_IN (old_decl))
4051 /* Folding builtins can create multiple instructions;
4052 we need to look at all of them. */
4053 gimple_stmt_iterator i2 = gsi;
4054 gsi_prev (&i2);
4055 if (fold_stmt (&gsi))
4057 gimple new_stmt;
4058 /* If a builtin at the end of a bb folded into nothing,
4059 the following loop won't work. */
4060 if (gsi_end_p (gsi))
4062 cgraph_update_edges_for_call_stmt (old_stmt,
4063 old_decl, NULL);
4064 break;
4066 if (gsi_end_p (i2))
4067 i2 = gsi_start_bb (BASIC_BLOCK (first));
4068 else
4069 gsi_next (&i2);
4070 while (1)
4072 new_stmt = gsi_stmt (i2);
4073 update_stmt (new_stmt);
4074 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4075 new_stmt);
4077 if (new_stmt == gsi_stmt (gsi))
4079 /* It is okay to check only for the very last
4080 of these statements. If it is a throwing
4081 statement nothing will change. If it isn't
4082 this can remove EH edges. If that weren't
4083 correct, it would be because some intermediate stmts
4084 throw, but not the last one. That would mean
4085 we'd have to split the block, which we can't
4086 do here and we'd lose anyway. And as builtins
4087 probably never throw, this all
4088 is moot anyway. */
4089 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4090 new_stmt))
4091 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4092 break;
4094 gsi_next (&i2);
4098 else if (fold_stmt (&gsi))
4100 /* Re-read the statement from GSI as fold_stmt() may
4101 have changed it. */
4102 gimple new_stmt = gsi_stmt (gsi);
4103 update_stmt (new_stmt);
4105 if (is_gimple_call (old_stmt)
4106 || is_gimple_call (new_stmt))
4107 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4108 new_stmt);
4110 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4111 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4117 /* Return true if BB has at least one abnormal outgoing edge. */
4119 static inline bool
4120 has_abnormal_outgoing_edge_p (basic_block bb)
4122 edge e;
4123 edge_iterator ei;
4125 FOR_EACH_EDGE (e, ei, bb->succs)
4126 if (e->flags & EDGE_ABNORMAL)
4127 return true;
4129 return false;
4132 /* Expand calls to inline functions in the body of FN. */
4134 unsigned int
4135 optimize_inline_calls (tree fn)
4137 copy_body_data id;
4138 basic_block bb;
4139 int last = n_basic_blocks;
4140 struct gimplify_ctx gctx;
4141 bool inlined_p = false;
4143 /* There is no point in performing inlining if errors have already
4144 occurred -- and we might crash if we try to inline invalid
4145 code. */
4146 if (seen_error ())
4147 return 0;
4149 /* Clear out ID. */
4150 memset (&id, 0, sizeof (id));
4152 id.src_node = id.dst_node = cgraph_get_node (fn);
4153 gcc_assert (id.dst_node->analyzed);
4154 id.dst_fn = fn;
4155 /* Or any functions that aren't finished yet. */
4156 if (current_function_decl)
4157 id.dst_fn = current_function_decl;
4159 id.copy_decl = copy_decl_maybe_to_var;
4160 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4161 id.transform_new_cfg = false;
4162 id.transform_return_to_modify = true;
4163 id.transform_lang_insert_block = NULL;
4164 id.statements_to_fold = pointer_set_create ();
4166 push_gimplify_context (&gctx);
4168 /* We make no attempts to keep dominance info up-to-date. */
4169 free_dominance_info (CDI_DOMINATORS);
4170 free_dominance_info (CDI_POST_DOMINATORS);
4172 /* Register specific gimple functions. */
4173 gimple_register_cfg_hooks ();
4175 /* Reach the trees by walking over the CFG, and note the
4176 enclosing basic-blocks in the call edges. */
4177 /* We walk the blocks going forward, because inlined function bodies
4178 will split id->current_basic_block, and the new blocks will
4179 follow it; we'll trudge through them, processing their CALL_EXPRs
4180 along the way. */
4181 FOR_EACH_BB (bb)
4182 inlined_p |= gimple_expand_calls_inline (bb, &id);
4184 pop_gimplify_context (NULL);
4186 #ifdef ENABLE_CHECKING
4188 struct cgraph_edge *e;
4190 verify_cgraph_node (id.dst_node);
4192 /* Double check that we inlined everything we are supposed to inline. */
4193 for (e = id.dst_node->callees; e; e = e->next_callee)
4194 gcc_assert (e->inline_failed);
4196 #endif
4198 /* Fold queued statements. */
4199 fold_marked_statements (last, id.statements_to_fold);
4200 pointer_set_destroy (id.statements_to_fold);
4202 gcc_assert (!id.debug_stmts);
4204 /* If we didn't inline into the function there is nothing to do. */
4205 if (!inlined_p)
4206 return 0;
4208 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4209 number_blocks (fn);
4211 delete_unreachable_blocks_update_callgraph (&id);
4212 #ifdef ENABLE_CHECKING
4213 verify_cgraph_node (id.dst_node);
4214 #endif
4216 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4217 not possible yet - the IPA passes might make various functions not
4218 throw and they don't care to proactively update local EH info. This is
4219 done later in the fixup_cfg pass that also executes the verification. */
4220 return (TODO_update_ssa
4221 | TODO_cleanup_cfg
4222 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4223 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4224 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4227 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4229 tree
4230 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4232 enum tree_code code = TREE_CODE (*tp);
4233 enum tree_code_class cl = TREE_CODE_CLASS (code);
4235 /* We make copies of most nodes. */
4236 if (IS_EXPR_CODE_CLASS (cl)
4237 || code == TREE_LIST
4238 || code == TREE_VEC
4239 || code == TYPE_DECL
4240 || code == OMP_CLAUSE)
4242 /* Because the chain gets clobbered when we make a copy, we save it
4243 here. */
4244 tree chain = NULL_TREE, new_tree;
4246 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4247 chain = TREE_CHAIN (*tp);
4249 /* Copy the node. */
4250 new_tree = copy_node (*tp);
4252 /* Propagate mudflap marked-ness. */
4253 if (flag_mudflap && mf_marked_p (*tp))
4254 mf_mark (new_tree);
4256 *tp = new_tree;
4258 /* Now, restore the chain, if appropriate. That will cause
4259 walk_tree to walk into the chain as well. */
4260 if (code == PARM_DECL
4261 || code == TREE_LIST
4262 || code == OMP_CLAUSE)
4263 TREE_CHAIN (*tp) = chain;
4265 /* For now, we don't update BLOCKs when we make copies. So, we
4266 have to nullify all BIND_EXPRs. */
4267 if (TREE_CODE (*tp) == BIND_EXPR)
4268 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4270 else if (code == CONSTRUCTOR)
4272 /* CONSTRUCTOR nodes need special handling because
4273 we need to duplicate the vector of elements. */
4274 tree new_tree;
4276 new_tree = copy_node (*tp);
4278 /* Propagate mudflap marked-ness. */
4279 if (flag_mudflap && mf_marked_p (*tp))
4280 mf_mark (new_tree);
4282 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4283 CONSTRUCTOR_ELTS (*tp));
4284 *tp = new_tree;
4286 else if (code == STATEMENT_LIST)
4287 /* We used to just abort on STATEMENT_LIST, but we can run into them
4288 with statement-expressions (c++/40975). */
4289 copy_statement_list (tp);
4290 else if (TREE_CODE_CLASS (code) == tcc_type)
4291 *walk_subtrees = 0;
4292 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4293 *walk_subtrees = 0;
4294 else if (TREE_CODE_CLASS (code) == tcc_constant)
4295 *walk_subtrees = 0;
4296 return NULL_TREE;
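/* Usage sketch (illustrative): copy_tree_r is meant to be driven by
   walk_tree. Assuming T is some GENERIC expression,

     tree t_copy = t;
     walk_tree (&t_copy, copy_tree_r, NULL, NULL);

   copies the expression nodes while sharing types, declarations and
   constants, per the *walk_subtrees = 0 cases above.  */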
4299 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4300 information indicating to what new SAVE_EXPR this one should be mapped,
4301 use that one. Otherwise, create a new node and enter it in ST, the
4302 pointer map used to remap the function's local trees. */
4304 static void
4305 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4307 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4308 tree *n;
4309 tree t;
4311 /* See if we already encountered this SAVE_EXPR. */
4312 n = (tree *) pointer_map_contains (st, *tp);
4314 /* If we didn't already remap this SAVE_EXPR, do so now. */
4315 if (!n)
4317 t = copy_node (*tp);
4319 /* Remember this SAVE_EXPR. */
4320 *pointer_map_insert (st, *tp) = t;
4321 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4322 *pointer_map_insert (st, t) = t;
4324 else
4326 /* We've already walked into this SAVE_EXPR; don't do it again. */
4327 *walk_subtrees = 0;
4328 t = *n;
4331 /* Replace this SAVE_EXPR with the copy. */
4332 *tp = t;
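/* For example (illustrative): when the same SAVE_EXPR node occurs twice
   in the walked tree, the first visit creates and records the copy;
   the second visit finds it in ST, reuses it, and clears
   *walk_subtrees, preserving the evaluate-once semantics.  */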
4335 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
4336 copies the declaration and enters it in the decl map in DATA (which is
4337 really a `copy_body_data *'). */
4339 static tree
4340 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4341 void *data)
4343 copy_body_data *id = (copy_body_data *) data;
4345 /* Don't walk into types. */
4346 if (TYPE_P (*tp))
4347 *walk_subtrees = 0;
4349 else if (TREE_CODE (*tp) == LABEL_EXPR)
4351 tree decl = TREE_OPERAND (*tp, 0);
4353 /* Copy the decl and remember the copy. */
4354 insert_decl_map (id, decl, id->copy_decl (decl, id));
4357 return NULL_TREE;
4360 /* Perform any modifications to EXPR required when it is unsaved. Does
4361 not recurse into EXPR's subtrees. */
4363 static void
4364 unsave_expr_1 (tree expr)
4366 switch (TREE_CODE (expr))
4368 case TARGET_EXPR:
4369 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4370 It's OK for this to happen if it was part of a subtree that
4371 isn't immediately expanded, such as operand 2 of another
4372 TARGET_EXPR. */
4373 if (TREE_OPERAND (expr, 1))
4374 break;
4376 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4377 TREE_OPERAND (expr, 3) = NULL_TREE;
4378 break;
4380 default:
4381 break;
4385 /* Called via walk_tree when an expression is unsaved. Using the
4386 decl map in DATA (which is really a `copy_body_data *'),
4387 remaps all local declarations to appropriate replacements. */
4389 static tree
4390 unsave_r (tree *tp, int *walk_subtrees, void *data)
4392 copy_body_data *id = (copy_body_data *) data;
4393 struct pointer_map_t *st = id->decl_map;
4394 tree *n;
4396 /* Only a local declaration (variable or label). */
4397 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4398 || TREE_CODE (*tp) == LABEL_DECL)
4400 /* Lookup the declaration. */
4401 n = (tree *) pointer_map_contains (st, *tp);
4403 /* If it's there, remap it. */
4404 if (n)
4405 *tp = *n;
4408 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4409 gcc_unreachable ();
4410 else if (TREE_CODE (*tp) == BIND_EXPR)
4411 copy_bind_expr (tp, walk_subtrees, id);
4412 else if (TREE_CODE (*tp) == SAVE_EXPR
4413 || TREE_CODE (*tp) == TARGET_EXPR)
4414 remap_save_expr (tp, st, walk_subtrees);
4415 else
4417 copy_tree_r (tp, walk_subtrees, NULL);
4419 /* Do whatever unsaving is required. */
4420 unsave_expr_1 (*tp);
4423 /* Keep iterating. */
4424 return NULL_TREE;
4427 /* Copies everything in EXPR and replaces variables, labels
4428 and SAVE_EXPRs local to EXPR. */
4430 tree
4431 unsave_expr_now (tree expr)
4433 copy_body_data id;
4435 /* There's nothing to do for NULL_TREE. */
4436 if (expr == 0)
4437 return expr;
4439 /* Set up ID. */
4440 memset (&id, 0, sizeof (id));
4441 id.src_fn = current_function_decl;
4442 id.dst_fn = current_function_decl;
4443 id.decl_map = pointer_map_create ();
4444 id.debug_map = NULL;
4446 id.copy_decl = copy_decl_no_change;
4447 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4448 id.transform_new_cfg = false;
4449 id.transform_return_to_modify = false;
4450 id.transform_lang_insert_block = NULL;
4452 /* Walk the tree once to find local labels. */
4453 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4455 /* Walk the tree again, copying, remapping, and unsaving. */
4456 walk_tree (&expr, unsave_r, &id, NULL);
4458 /* Clean up. */
4459 pointer_map_destroy (id.decl_map);
4460 if (id.debug_map)
4461 pointer_map_destroy (id.debug_map);
4463 return expr;
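/* Usage sketch (hypothetical): a front end that needs to emit an
   expression twice could write

     tree second_use = unsave_expr_now (first_use);

   so that labels, local variables and SAVE_EXPRs in the second copy
   are fresh rather than shared with the first.  */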
4466 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4467 label, copies the declaration and enters it in the decl map in WI->info
4468 (which is really a `copy_body_data *'). */
4470 static tree
4471 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4472 bool *handled_ops_p ATTRIBUTE_UNUSED,
4473 struct walk_stmt_info *wi)
4475 copy_body_data *id = (copy_body_data *) wi->info;
4476 gimple stmt = gsi_stmt (*gsip);
4478 if (gimple_code (stmt) == GIMPLE_LABEL)
4480 tree decl = gimple_label_label (stmt);
4482 /* Copy the decl and remember the copy. */
4483 insert_decl_map (id, decl, id->copy_decl (decl, id));
4486 return NULL_TREE;
4490 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4491 Using the decl map held by the `copy_body_data' in WI->info,
4492 remaps all local declarations to appropriate replacements in gimple
4493 operands. */
4495 static tree
4496 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4498 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4499 copy_body_data *id = (copy_body_data *) wi->info;
4500 struct pointer_map_t *st = id->decl_map;
4501 tree *n;
4502 tree expr = *tp;
4504 /* Only a local declaration (variable or label). */
4505 if ((TREE_CODE (expr) == VAR_DECL
4506 && !TREE_STATIC (expr))
4507 || TREE_CODE (expr) == LABEL_DECL)
4509 /* Lookup the declaration. */
4510 n = (tree *) pointer_map_contains (st, expr);
4512 /* If it's there, remap it. */
4513 if (n)
4514 *tp = *n;
4515 *walk_subtrees = 0;
4517 else if (TREE_CODE (expr) == STATEMENT_LIST
4518 || TREE_CODE (expr) == BIND_EXPR
4519 || TREE_CODE (expr) == SAVE_EXPR)
4520 gcc_unreachable ();
4521 else if (TREE_CODE (expr) == TARGET_EXPR)
4523 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4524 It's OK for this to happen if it was part of a subtree that
4525 isn't immediately expanded, such as operand 2 of another
4526 TARGET_EXPR. */
4527 if (!TREE_OPERAND (expr, 1))
4529 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4530 TREE_OPERAND (expr, 3) = NULL_TREE;
4534 /* Keep iterating. */
4535 return NULL_TREE;
4539 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4540 Using the decl map held by the `copy_body_data' in WI->info,
4541 remaps all local declarations to appropriate replacements in gimple
4542 statements. */
4544 static tree
4545 replace_locals_stmt (gimple_stmt_iterator *gsip,
4546 bool *handled_ops_p ATTRIBUTE_UNUSED,
4547 struct walk_stmt_info *wi)
4549 copy_body_data *id = (copy_body_data *) wi->info;
4550 gimple stmt = gsi_stmt (*gsip);
4552 if (gimple_code (stmt) == GIMPLE_BIND)
4554 tree block = gimple_bind_block (stmt);
4556 if (block)
4558 remap_block (&block, id);
4559 gimple_bind_set_block (stmt, block);
4562 /* This will remap a lot of the same decls again, but this should be
4563 harmless. */
4564 if (gimple_bind_vars (stmt))
4565 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4568 /* Keep iterating. */
4569 return NULL_TREE;
4573 /* Copies everything in SEQ and replaces variables and labels local to
4574 current_function_decl. */
4576 gimple_seq
4577 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4579 copy_body_data id;
4580 struct walk_stmt_info wi;
4581 struct pointer_set_t *visited;
4582 gimple_seq copy;
4584 /* There's nothing to do for a NULL sequence. */
4585 if (seq == NULL)
4586 return seq;
4588 /* Set up ID. */
4589 memset (&id, 0, sizeof (id));
4590 id.src_fn = current_function_decl;
4591 id.dst_fn = current_function_decl;
4592 id.decl_map = pointer_map_create ();
4593 id.debug_map = NULL;
4595 id.copy_decl = copy_decl_no_change;
4596 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4597 id.transform_new_cfg = false;
4598 id.transform_return_to_modify = false;
4599 id.transform_lang_insert_block = NULL;
4601 /* Walk the tree once to find local labels. */
4602 memset (&wi, 0, sizeof (wi));
4603 visited = pointer_set_create ();
4604 wi.info = &id;
4605 wi.pset = visited;
4606 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4607 pointer_set_destroy (visited);
4609 copy = gimple_seq_copy (seq);
4611 /* Walk the copy, remapping decls. */
4612 memset (&wi, 0, sizeof (wi));
4613 wi.info = &id;
4614 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4616 /* Clean up. */
4617 pointer_map_destroy (id.decl_map);
4618 if (id.debug_map)
4619 pointer_map_destroy (id.debug_map);
4621 return copy;
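/* Usage sketch (hypothetical): duplicate the body of a GIMPLE_BIND so
   that the two copies can later be edited independently:

     gimple_seq dup
       = copy_gimple_seq_and_replace_locals (gimple_bind_body (stmt));  */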
4625 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4627 static tree
4628 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4630 if (*tp == data)
4631 return (tree) data;
4632 else
4633 return NULL;
4636 DEBUG_FUNCTION bool
4637 debug_find_tree (tree top, tree search)
4639 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
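/* E.g., from a debugging session (illustrative):

     (gdb) call debug_find_tree (top, search)

   returns true iff SEARCH occurs somewhere beneath TOP.  */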
4643 /* Declare the variables created by the inliner. Add all the variables in
4644 VARS to BLOCK. */
4646 static void
4647 declare_inline_vars (tree block, tree vars)
4649 tree t;
4650 for (t = vars; t; t = DECL_CHAIN (t))
4652 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4653 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4654 add_local_decl (cfun, t);
4657 if (block)
4658 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4661 /* Finish the copy COPY of DECL. DECL originally was in ID->src_fn,
4662 but the copy will be in ID->dst_fn. Fix up contexts, debug info
4663 and bookkeeping flags accordingly. */
4665 static tree
4666 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4668 /* Don't generate debug information for the copy if we wouldn't have
4669 generated it for the original either. */
4670 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4671 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4673 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4674 declaration inspired this copy. */
4675 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4677 /* The new variable/label has no RTL, yet. */
4678 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4679 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4680 SET_DECL_RTL (copy, 0);
4682 /* These args would always appear unused, if not for this. */
4683 TREE_USED (copy) = 1;
4685 /* Set the context for the new declaration. */
4686 if (!DECL_CONTEXT (decl))
4687 /* Globals stay global. */
4689 else if (DECL_CONTEXT (decl) != id->src_fn)
4690 /* Things that weren't in the scope of the function we're inlining
4691 from aren't in the scope we're inlining to, either. */
4693 else if (TREE_STATIC (decl))
4694 /* Function-scoped static variables should stay in the original
4695 function. */
4697 else
4698 /* Ordinary automatic local variables are now in the scope of the
4699 new function. */
4700 DECL_CONTEXT (copy) = id->dst_fn;
4702 if (TREE_CODE (decl) == VAR_DECL
4703 /* C++ clones functions during parsing, before
4704 referenced_vars. */
4705 && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
4706 && referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
4707 DECL_UID (decl)))
4708 add_referenced_var (copy);
4710 return copy;
4713 static tree
4714 copy_decl_to_var (tree decl, copy_body_data *id)
4716 tree copy, type;
4718 gcc_assert (TREE_CODE (decl) == PARM_DECL
4719 || TREE_CODE (decl) == RESULT_DECL);
4721 type = TREE_TYPE (decl);
4723 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4724 VAR_DECL, DECL_NAME (decl), type);
4725 if (DECL_PT_UID_SET_P (decl))
4726 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4727 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4728 TREE_READONLY (copy) = TREE_READONLY (decl);
4729 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4730 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4732 return copy_decl_for_dup_finish (id, decl, copy);
4735 /* Like copy_decl_to_var, but create a return slot object instead of a
4736 pointer variable for return by invisible reference. */
4738 static tree
4739 copy_result_decl_to_var (tree decl, copy_body_data *id)
4741 tree copy, type;
4743 gcc_assert (TREE_CODE (decl) == PARM_DECL
4744 || TREE_CODE (decl) == RESULT_DECL);
4746 type = TREE_TYPE (decl);
4747 if (DECL_BY_REFERENCE (decl))
4748 type = TREE_TYPE (type);
4750 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4751 VAR_DECL, DECL_NAME (decl), type);
4752 if (DECL_PT_UID_SET_P (decl))
4753 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4754 TREE_READONLY (copy) = TREE_READONLY (decl);
4755 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4756 if (!DECL_BY_REFERENCE (decl))
4758 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4759 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4762 return copy_decl_for_dup_finish (id, decl, copy);
4765 tree
4766 copy_decl_no_change (tree decl, copy_body_data *id)
4768 tree copy;
4770 copy = copy_node (decl);
4772 /* The COPY is not abstract; it will be generated in DST_FN. */
4773 DECL_ABSTRACT (copy) = 0;
4774 lang_hooks.dup_lang_specific_decl (copy);
4776 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4777 been taken; it's for internal bookkeeping in expand_goto_internal. */
4778 if (TREE_CODE (copy) == LABEL_DECL)
4780 TREE_ADDRESSABLE (copy) = 0;
4781 LABEL_DECL_UID (copy) = -1;
4784 return copy_decl_for_dup_finish (id, decl, copy);
4787 static tree
4788 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4790 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4791 return copy_decl_to_var (decl, id);
4792 else
4793 return copy_decl_no_change (decl, id);
4796 /* Return a copy of the function's argument tree. */
4797 static tree
4798 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4799 bitmap args_to_skip, tree *vars)
4801 tree arg, *parg;
4802 tree new_parm = NULL;
4803 int i = 0;
4805 parg = &new_parm;
4807 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
4808 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4810 tree new_tree = remap_decl (arg, id);
4811 lang_hooks.dup_lang_specific_decl (new_tree);
4812 *parg = new_tree;
4813 parg = &DECL_CHAIN (new_tree);
4815 else if (!pointer_map_contains (id->decl_map, arg))
4817 /* Make an equivalent VAR_DECL. If the argument was used
4818 as a temporary variable later in the function, the uses will be
4819 replaced by the local variable. */
4820 tree var = copy_decl_to_var (arg, id);
4821 add_referenced_var (var);
4822 insert_decl_map (id, arg, var);
4823 /* Declare this new variable. */
4824 DECL_CHAIN (var) = *vars;
4825 *vars = var;
4827 return new_parm;
4830 /* Return a copy of the function's static chain. */
4831 static tree
4832 copy_static_chain (tree static_chain, copy_body_data * id)
4834 tree *chain_copy, *pvar;
4836 chain_copy = &static_chain;
4837 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
4839 tree new_tree = remap_decl (*pvar, id);
4840 lang_hooks.dup_lang_specific_decl (new_tree);
4841 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
4842 *pvar = new_tree;
4844 return static_chain;
4847 /* Return true if the function is allowed to be versioned.
4848 This is a guard for the versioning functionality. */
4850 bool
4851 tree_versionable_function_p (tree fndecl)
4853 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4854 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
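/* For example (illustrative): a function declared

     __attribute__ ((noclone)) int f (int);

   fails tree_versionable_function_p, so IPA transforms that rely on
   versioning will leave it alone.  */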
4857 /* Delete all unreachable basic blocks and update callgraph.
4858 Doing so is somewhat nontrivial because we need to update all clones and
4859 remove inline functions that become unreachable. */
4861 static bool
4862 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4864 bool changed = false;
4865 basic_block b, next_bb;
4867 find_unreachable_blocks ();
4869 /* Delete all unreachable basic blocks. */
4871 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4873 next_bb = b->next_bb;
4875 if (!(b->flags & BB_REACHABLE))
4877 gimple_stmt_iterator bsi;
4879 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4880 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4882 struct cgraph_edge *e;
4883 struct cgraph_node *node;
4885 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4887 if (!e->inline_failed)
4888 cgraph_remove_node_and_inline_clones (e->callee);
4889 else
4890 cgraph_remove_edge (e);
4892 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4893 && id->dst_node->clones)
4894 for (node = id->dst_node->clones; node != id->dst_node;)
4896 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4898 if (!e->inline_failed)
4899 cgraph_remove_node_and_inline_clones (e->callee);
4900 else
4901 cgraph_remove_edge (e);
4904 if (node->clones)
4905 node = node->clones;
4906 else if (node->next_sibling_clone)
4907 node = node->next_sibling_clone;
4908 else
4910 while (node != id->dst_node && !node->next_sibling_clone)
4911 node = node->clone_of;
4912 if (node != id->dst_node)
4913 node = node->next_sibling_clone;
4917 delete_basic_block (b);
4918 changed = true;
4922 return changed;
4925 /* Update clone info after duplication. */
4927 static void
4928 update_clone_info (copy_body_data * id)
4930 struct cgraph_node *node;
4931 if (!id->dst_node->clones)
4932 return;
4933 for (node = id->dst_node->clones; node != id->dst_node;)
4935 /* First update replace maps to match the new body. */
4936 if (node->clone.tree_map)
4938 unsigned int i;
4939 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4941 struct ipa_replace_map *replace_info;
4942 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4943 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4944 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4947 if (node->clones)
4948 node = node->clones;
4949 else if (node->next_sibling_clone)
4950 node = node->next_sibling_clone;
4951 else
4953 while (node != id->dst_node && !node->next_sibling_clone)
4954 node = node->clone_of;
4955 if (node != id->dst_node)
4956 node = node->next_sibling_clone;
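/* Note: the loop above (like the one in
   delete_unreachable_blocks_update_callgraph) walks the clone tree
   iteratively in preorder: descend via node->clones, advance via
   node->next_sibling_clone, and climb back via node->clone_of until a
   sibling is found or id->dst_node is reached again.  */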
4961 /* Create a copy of a function's tree.
4962 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4963 of the original function and the new copied function
4964 respectively. In case we want to replace a DECL
4965 tree with another tree while duplicating the function's
4966 body, TREE_MAP represents the mapping between these
4967 trees. If UPDATE_CLONES is set, the call_stmt fields
4968 of edges of clones of the function will be updated.
4970 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
4971 from the new version.
4972 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
4973 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
4974 */
4975 void
4976 tree_function_versioning (tree old_decl, tree new_decl,
4977 VEC(ipa_replace_map_p,gc)* tree_map,
4978 bool update_clones, bitmap args_to_skip,
4979 bitmap blocks_to_copy, basic_block new_entry)
4981 struct cgraph_node *old_version_node;
4982 struct cgraph_node *new_version_node;
4983 copy_body_data id;
4984 tree p;
4985 unsigned i;
4986 struct ipa_replace_map *replace_info;
4987 basic_block old_entry_block, bb;
4988 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4990 tree old_current_function_decl = current_function_decl;
4991 tree vars = NULL_TREE;
4993 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4994 && TREE_CODE (new_decl) == FUNCTION_DECL);
4995 DECL_POSSIBLY_INLINED (old_decl) = 1;
4997 old_version_node = cgraph_get_node (old_decl);
4998 gcc_checking_assert (old_version_node);
4999 new_version_node = cgraph_get_node (new_decl);
5000 gcc_checking_assert (new_version_node);
5002 /* Output the inlining info for this abstract function, since it has been
5003 inlined. If we don't do this now, we can lose the information about the
5004 variables in the function when the blocks get blown away as soon as we
5005 remove the cgraph node. */
5006 (*debug_hooks->outlining_inline_function) (old_decl);
5008 DECL_ARTIFICIAL (new_decl) = 1;
5009 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5010 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5012 /* Prepare the data structures for the tree copy. */
5013 memset (&id, 0, sizeof (id));
5015 /* Create the set of statements to fold after copying. */
5016 id.statements_to_fold = pointer_set_create ();
5018 id.decl_map = pointer_map_create ();
5019 id.debug_map = NULL;
5020 id.src_fn = old_decl;
5021 id.dst_fn = new_decl;
5022 id.src_node = old_version_node;
5023 id.dst_node = new_version_node;
5024 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5025 if (id.src_node->ipa_transforms_to_apply)
5027 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
5028 unsigned int i;
5030 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
5031 id.src_node->ipa_transforms_to_apply);
5032 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
5033 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
5034 VEC_index (ipa_opt_pass,
5035 old_transforms_to_apply,
5036 i));
5039 id.copy_decl = copy_decl_no_change;
5040 id.transform_call_graph_edges
5041 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5042 id.transform_new_cfg = true;
5043 id.transform_return_to_modify = false;
5044 id.transform_lang_insert_block = NULL;
5046 current_function_decl = new_decl;
5047 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5048 (DECL_STRUCT_FUNCTION (old_decl));
5049 initialize_cfun (new_decl, old_decl,
5050 old_entry_block->count);
5051 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5052 = id.src_cfun->gimple_df->ipa_pta;
5053 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
5055 /* Copy the function's static chain. */
5056 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5057 if (p)
5058 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5059 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5060 &id);
5062 /* If there's a tree_map, prepare for substitution. */
5063 if (tree_map)
5064 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
5066 gimple init;
5067 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
5068 if (replace_info->replace_p)
5070 tree op = replace_info->new_tree;
5071 if (!replace_info->old_tree)
5073 int i = replace_info->parm_num;
5074 tree parm;
5075 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5076 i --;
5077 replace_info->old_tree = parm;
5081 STRIP_NOPS (op);
5083 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
5084 op = TREE_OPERAND (op, 0);
5086 if (TREE_CODE (op) == ADDR_EXPR)
5088 op = TREE_OPERAND (op, 0);
5089 while (handled_component_p (op))
5090 op = TREE_OPERAND (op, 0);
5091 if (TREE_CODE (op) == VAR_DECL)
5092 add_referenced_var (op);
5094 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5095 init = setup_one_parameter (&id, replace_info->old_tree,
5096 replace_info->new_tree, id.src_fn,
5097 NULL,
5098 &vars);
5099 if (init)
5100 VEC_safe_push (gimple, heap, init_stmts, init);
5103 /* Copy the function's arguments. */
5104 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5105 DECL_ARGUMENTS (new_decl) =
5106 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5107 args_to_skip, &vars);
5109 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5110 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5112 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5114 if (!VEC_empty (tree, DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5115 /* Add local vars. */
5116 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id, false);
5118 if (DECL_RESULT (old_decl) != NULL_TREE)
5120 tree old_name;
5121 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5122 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5123 if (gimple_in_ssa_p (id.src_cfun)
5124 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5125 && (old_name
5126 = gimple_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5128 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5129 insert_decl_map (&id, old_name, new_name);
5130 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5131 set_default_def (DECL_RESULT (new_decl), new_name);
5135 /* Copy the function's body. */
5136 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5137 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
5139 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5140 number_blocks (new_decl);
5142 /* We want to create the BB unconditionally, so that the addition of
5143 debug stmts doesn't affect BB count, which may in the end cause
5144 codegen differences. */
5145 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5146 while (VEC_length (gimple, init_stmts))
5147 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
5148 update_clone_info (&id);
5150 /* Remap the nonlocal_goto_save_area, if any. */
5151 if (cfun->nonlocal_goto_save_area)
5153 struct walk_stmt_info wi;
5155 memset (&wi, 0, sizeof (wi));
5156 wi.info = &id;
5157 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5160 /* Clean up. */
5161 pointer_map_destroy (id.decl_map);
5162 if (id.debug_map)
5163 pointer_map_destroy (id.debug_map);
5164 free_dominance_info (CDI_DOMINATORS);
5165 free_dominance_info (CDI_POST_DOMINATORS);
5167 fold_marked_statements (0, id.statements_to_fold);
5168 pointer_set_destroy (id.statements_to_fold);
5169 fold_cond_expr_cond ();
5170 delete_unreachable_blocks_update_callgraph (&id);
5171 if (id.dst_node->analyzed)
5172 cgraph_rebuild_references ();
5173 update_ssa (TODO_update_ssa);
5175 /* After partial cloning we need to rescale frequencies, so that they
5176 are within the proper range in the cloned function. */
5177 if (new_entry)
5179 struct cgraph_edge *e;
5180 rebuild_frequencies ();
5182 new_version_node->count = ENTRY_BLOCK_PTR->count;
5183 for (e = new_version_node->callees; e; e = e->next_callee)
5185 basic_block bb = gimple_bb (e->call_stmt);
5186 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5187 bb);
5188 e->count = bb->count;
5190 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5192 basic_block bb = gimple_bb (e->call_stmt);
5193 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5194 bb);
5195 e->count = bb->count;
5199 free_dominance_info (CDI_DOMINATORS);
5200 free_dominance_info (CDI_POST_DOMINATORS);
5202 gcc_assert (!id.debug_stmts);
5203 VEC_free (gimple, heap, init_stmts);
5204 pop_cfun ();
5205 current_function_decl = old_current_function_decl;
5206 gcc_assert (!current_function_decl
5207 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
5208 return;
5211 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5212 the callee and return the inlined body on success. */
5214 tree
5215 maybe_inline_call_in_expr (tree exp)
5217 tree fn = get_callee_fndecl (exp);
5219 /* We can only try to inline "const" functions. */
5220 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5222 struct pointer_map_t *decl_map = pointer_map_create ();
5223 call_expr_arg_iterator iter;
5224 copy_body_data id;
5225 tree param, arg, t;
5227 /* Remap the parameters. */
5228 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5229 param;
5230 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5231 *pointer_map_insert (decl_map, param) = arg;
5233 memset (&id, 0, sizeof (id));
5234 id.src_fn = fn;
5235 id.dst_fn = current_function_decl;
5236 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5237 id.decl_map = decl_map;
5239 id.copy_decl = copy_decl_no_change;
5240 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5241 id.transform_new_cfg = false;
5242 id.transform_return_to_modify = true;
5243 id.transform_lang_insert_block = NULL;
5245 /* Make sure not to unshare trees behind the front-end's back
5246 since front-end specific mechanisms may rely on sharing. */
5247 id.regimplify = false;
5248 id.do_not_unshare = true;
5250 /* We're not inside any EH region. */
5251 id.eh_lp_nr = 0;
5253 t = copy_tree_body (&id);
5254 pointer_map_destroy (decl_map);
5256 /* We can only return something suitable for use in a GENERIC
5257 expression tree. */
5258 if (TREE_CODE (t) == MODIFY_EXPR)
5259 return TREE_OPERAND (t, 1);
5262 return NULL_TREE;
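/* Usage sketch (hypothetical): a front end folding a call in a GENERIC
   initializer might try

     tree folded = maybe_inline_call_in_expr (call);
     if (folded)
       expr = folded;

   and keep the original CALL_EXPR when NULL_TREE comes back.  */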
5265 /* Duplicate a type, fields and all. */
5267 tree
5268 build_duplicate_type (tree type)
5270 struct copy_body_data id;
5272 memset (&id, 0, sizeof (id));
5273 id.src_fn = current_function_decl;
5274 id.dst_fn = current_function_decl;
5275 id.src_cfun = cfun;
5276 id.decl_map = pointer_map_create ();
5277 id.debug_map = NULL;
5278 id.copy_decl = copy_decl_no_change;
5280 type = remap_type_1 (type, &id);
5282 pointer_map_destroy (id.decl_map);
5283 if (id.debug_map)
5284 pointer_map_destroy (id.debug_map);
5286 TYPE_CANONICAL (type) = type;
5288 return type;
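/* Usage sketch (hypothetical):

     tree dup = build_duplicate_type (orig_type);

   DUP has the same fields as ORIG_TYPE but is its own TYPE_CANONICAL,
   so the two are treated as distinct types afterwards.  */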