/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "integrate.h"

#include "rtl.h"	/* FIXME: For asm_str_count.  */

/* I'm not really happy about this, but we need to handle GIMPLE and
   non-GIMPLE trees.  */
#include "gimple.h"
/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inlined into the blocks of an existing
   function.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
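/* A minimal illustration of the inlining transformation described
   above (an expository example, not part of the sources).  Given

       int callee (int p) { return p + 1; }
       ...
       y = callee (3);

   the inliner conceptually produces in the caller

       int p.1;            <- PARM_DECL remapped to a VAR_DECL
       p.1 = 3;
       retval.2 = p.1 + 1; <- the RETURN_EXPR became a MODIFY_EXPR
       y = retval.2;       <- uses of the call read the return variable

   and callgraph edges for calls in the duplicated body are cloned or
   updated as described above.  */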
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;
/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}
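/* Usage sketch (illustrative): after

       insert_decl_map (id, old_parm, new_var);

   both old_parm and new_var map to new_var.  The identity entry means
   that when a later walk encounters new_var itself (e.g. in a tree
   that was already partially remapped), the lookup finds it and
   returns it unchanged instead of copying it a second time.  */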
/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}
/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;
/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return name;
    }

  /* Do not set DEF_STMT yet, as the statement is not copied yet.  We do
     that in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by a variable
     during inlining: this saves us from the need to introduce a PHI
     node in case the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining a function that has an uninitialized variable,
             we might extend its lifetime (the variable might get
             reused).  This causes an ICE if we end up extending the
             lifetime of an SSA name across an abnormal edge, and it
             also increases register pressure.

             We simply initialize all uninitialized vars with 0, except
             when we are inlining into the very first BB.  We could
             avoid this for all BBs that are not inside strongly
             connected regions of the CFG, but that is expensive to
             test.  */
          if (id->entry_bb
              && is_gimple_reg (SSA_NAME_VAR (name))
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;

              init_stmt = gimple_build_assign (new_tree,
                                               fold_convert (TREE_TYPE (new_tree),
                                                             integer_zero_node));
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
                  == name)
                set_default_def (SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
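/* Illustration of the zero-initialization workaround above (example
   only): when inlining a callee whose variable u is used without ever
   being defined, and u's SSA name occurs in an abnormal PHI, the
   copied body gets an explicit

       u_1 = 0;

   appended to ID->entry_bb, so the new SSA name has a real definition
   and its live range cannot be extended across an abnormal edge.  */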
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      if (cfun && gimple_in_ssa_p (cfun)
          && (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
        {
          get_var_ann (t);
          add_referenced_var (t);
        }
      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}
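/* Example of the variant handling above (illustrative): remapping the
   variant `const T' of a variably modified type first remaps the main
   variant T itself; the new `const T' copy is then spliced into the
   new T's TYPE_NEXT_VARIANT chain, preserving the invariant that all
   variants of a type hang off its main variant.  */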
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
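/* Example of a type that does need remapping (illustrative):

       void f (int n)
       {
         int a[n];     <- TYPE_SIZE of a's type depends on PARM_DECL n
       }

   The array type is variably modified with respect to f, so when f's
   body is duplicated and n is remapped, `int[n]' must be remapped by
   remap_type_1 as well.  A type such as plain `int' is simply mapped
   to itself.  */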
/* Return previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell whether a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}
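/* For instance (illustrative), a function-local

       static int counter;

   is not an auto variable of ID->src_fn, so it is treated as
   non-local: all inlined copies of the function share the one
   declaration instead of each duplicating the static.  */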
static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var)
              && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
            add_local_decl (cfun, old_var);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;

          /* Also copy value-expressions.  */
          if (TREE_CODE (new_var) == VAR_DECL
              && DECL_HAS_VALUE_EXPR_P (new_var))
            {
              tree tem = DECL_VALUE_EXPR (new_var);
              bool old_regimplify = id->regimplify;
              id->remapping_type_depth++;
              walk_tree (&tem, copy_tree_body_r, id, NULL);
              id->remapping_type_depth--;
              id->regimplify = old_regimplify;
              SET_DECL_VALUE_EXPR (new_var, tem);
            }
        }
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}
/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}
/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */
      if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree old = *tp;
              tree ptr = unshare_expr (*n);
              tree tem;
              if ((tem = maybe_fold_offset_to_reference (EXPR_LOCATION (*tp),
                                                         ptr,
                                                         TREE_OPERAND (*tp, 1),
                                                         TREE_TYPE (*tp)))
                  && TREE_THIS_VOLATILE (tem) == TREE_THIS_VOLATILE (old))
                {
                  tree *tem_basep = &tem;
                  while (handled_component_p (*tem_basep))
                    tem_basep = &TREE_OPERAND (*tem_basep, 0);
                  if (TREE_CODE (*tem_basep) == MEM_REF)
                    *tem_basep
                      = build2 (MEM_REF, TREE_TYPE (*tem_basep),
                                TREE_OPERAND (*tem_basep, 0),
                                fold_convert (TREE_TYPE (TREE_OPERAND (*tp, 1)),
                                              TREE_OPERAND (*tem_basep, 1)));
                  else
                    *tem_basep
                      = build2 (MEM_REF, TREE_TYPE (*tem_basep),
                                build_fold_addr_expr (*tem_basep),
                                build_int_cst
                                  (TREE_TYPE (TREE_OPERAND (*tp, 1)), 0));
                  *tp = tem;
                }
              else
                {
                  *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
                                     ptr, TREE_OPERAND (*tp, 1));
                  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                }
              TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars.  If not referenced from types only.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
        gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the MEM_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
          tree block = id->block;
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
          id->block = block;
          recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
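/* Illustration of the MEM_REF re-canonicalization above (example
   only): if the caller passes &s.f for a pointer parameter p, the
   copied dereference

       MEM[p, 4]    becomes    MEM[&s.f, 4]

   which maybe_fold_offset_to_reference may turn back into a direct
   component reference such as s.g, provided the volatility of the
   access is unchanged; otherwise a plain MEM_REF is rebuilt on the
   substituted pointer.  */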
/* Called from copy_tree_body via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 that lie about the types they point to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              if (id->do_not_unshare)
                new_tree = *n;
              else
                new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ???  We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }
      else if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree old = *tp;
              *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
                                 unshare_expr (*n), TREE_OPERAND (*tp, 1));
              TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
              TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars.  If not referenced from types or debug stmts only.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n || id->remapping_type_depth != 0);
              if (n)
                new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
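/* Illustration of the INDIRECT_REF case above (example only):
   substituting the argument &x for parameter p would leave the
   non-canonical tree *&x in the copied body; gimple_fold_indirect_ref
   collapses it back to plain x, and the fallback paths preserve
   TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS from the original
   reference.  */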
/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (NULL, new_nr);
}
/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval && TREE_CODE (retval) != RESULT_DECL)
        {
          copy = gimple_build_assign (id->retvar, retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      if (gimple_debug_bind_p (stmt))
        {
          copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
                                          gimple_debug_bind_get_value (stmt),
                                          stmt);
          VEC_safe_push (gimple, heap, id->debug_stmts, copy);
          return copy;
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
         RESX and EH_DISPATCH.  */
      if (id->eh_map)
        switch (gimple_code (copy))
          {
          case GIMPLE_CALL:
            {
              tree r, fndecl = gimple_call_fndecl (copy);
              if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
                switch (DECL_FUNCTION_CODE (fndecl))
                  {
                  case BUILT_IN_EH_COPY_VALUES:
                    r = gimple_call_arg (copy, 1);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 1, r);
                    /* FALLTHRU */

                  case BUILT_IN_EH_POINTER:
                  case BUILT_IN_EH_FILTER:
                    r = gimple_call_arg (copy, 0);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 0, r);
                    break;

                  default:
                    break;
                  }

              /* Reset alias info if we didn't apply measures to
                 keep it valid over inlining by setting DECL_PT_UID.  */
              if (!id->src_cfun->gimple_df
                  || !id->src_cfun->gimple_df->ipa_pta)
                gimple_call_reset_alias_info (copy);
            }
            break;

          case GIMPLE_RESX:
            {
              int r = gimple_resx_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_resx_set_region (copy, r);
            }
            break;

          case GIMPLE_EH_DISPATCH:
            {
              int r = gimple_eh_dispatch_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_eh_dispatch_set_region (copy, r);
            }
            break;

          default:
            break;
          }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  if (gimple_debug_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}
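/* Illustration (example only): with ID->transform_return_to_modify
   set, the statement

       return _5;      becomes      retvar = _5;

   with the branch back to the caller added later by CFG manipulation,
   while a plain `return;' is replaced by a GIMPLE_NOP.  */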
/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;
  basic_block prev;

  /* Search for previous copied basic block.  */
  prev = bb->prev_bb;
  while (!prev->aux)
    prev = prev->prev_bb;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) prev->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     matter only for driving canonicalize_loop_headers.  */
  freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
        continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
         non-gimple (foo *)&this->m; fix that here.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == NOP_EXPR
          && !is_gimple_val (gimple_assign_rhs1 (stmt)))
        {
          tree new_rhs;
          new_rhs = force_gimple_operand_gsi (&seq_gsi,
                                              gimple_assign_rhs1 (stmt),
                                              true, NULL, false, GSI_NEW_STMT);
          gimple_assign_set_rhs1 (stmt, new_rhs);
          id->regimplify = false;
        }

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
        gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block has been empty at the start of this iteration,
         call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
        copy_gsi = gsi_start_bb (copy_basic_block);
      else
        gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
         possibly turned the statement into multiple statements, we
         need to process all of them.  */
      do
        {
          tree fn;

          stmt = gsi_stmt (copy_gsi);
          if (is_gimple_call (stmt)
              && gimple_call_va_arg_pack_p (stmt)
              && id->gimple_call)
            {
              /* __builtin_va_arg_pack () should be replaced by
                 all arguments corresponding to ... in the caller.  */
              tree p;
              gimple new_call;
              VEC(tree, heap) *argarray;
              size_t nargs = gimple_call_num_args (id->gimple_call);
              size_t n;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                nargs--;

              /* Create the new array of arguments.  */
              n = nargs + gimple_call_num_args (stmt);
              argarray = VEC_alloc (tree, heap, n);
              VEC_safe_grow (tree, heap, argarray, n);

              /* Copy all the arguments before '...'  */
              memcpy (VEC_address (tree, argarray),
                      gimple_call_arg_ptr (stmt, 0),
                      gimple_call_num_args (stmt) * sizeof (tree));

              /* Append the arguments passed in '...'  */
              memcpy (VEC_address (tree, argarray) + gimple_call_num_args (stmt),
                      gimple_call_arg_ptr (id->gimple_call, 0)
                        + (gimple_call_num_args (id->gimple_call) - nargs),
                      nargs * sizeof (tree));

              new_call = gimple_build_call_vec (gimple_call_fn (stmt),
                                                argarray);

              VEC_free (tree, heap, argarray);

              /* Copy all GIMPLE_CALL flags, location and block, except
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, stmt);
              gimple_call_set_va_arg_pack (new_call, false);
              gimple_set_location (new_call, gimple_location (stmt));
              gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

              gsi_replace (&copy_gsi, new_call, false);
              stmt = new_call;
            }
          else if (is_gimple_call (stmt)
                   && id->gimple_call
                   && (decl = gimple_call_fndecl (stmt))
                   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
            {
              /* __builtin_va_arg_pack_len () should be replaced by
                 the number of anonymous arguments.  */
              size_t nargs = gimple_call_num_args (id->gimple_call);
              tree count, p;
              gimple new_stmt;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                nargs--;

              count = build_int_cst (integer_type_node, nargs);
              new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
              gsi_replace (&copy_gsi, new_stmt, false);
              stmt = new_stmt;
            }

          /* Statements produced by inlining can be unfolded, especially
             when we constant propagated some operands.  We can't fold
             them right now for two reasons:
             1) folding requires SSA_NAME_DEF_STMTs to be correct
             2) we can't change function calls to builtins.
             So we just mark the statement for later folding.  We mark
             all new statements, instead of just the statements that changed
             through some nontrivial substitution, so that even statements
             made foldable indirectly are updated.  If this turns out to be
             expensive, copy_body can be told to watch for nontrivial
             changes.  */
          if (id->statements_to_fold)
            pointer_set_insert (id->statements_to_fold, stmt);

          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (is_gimple_call (stmt))
            {
              struct cgraph_edge *edge;
              int flags;

              switch (id->transform_call_graph_edges)
                {
                case CB_CGE_DUPLICATE:
                  edge = cgraph_edge (id->src_node, orig_stmt);
                  if (edge)
                    {
                      int edge_freq = edge->frequency;
                      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
                                                gimple_uid (stmt),
                                                REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
                                                edge->frequency, true);
                      /* We could also just rescale the frequency, but
                         doing so would introduce roundoff errors and make
                         the verifier unhappy.  */
                      edge->frequency
                        = compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                          copy_basic_block);
                      if (dump_file
                          && profile_status_for_function (cfun) != PROFILE_ABSENT
                          && (edge_freq > edge->frequency + 10
                              || edge_freq < edge->frequency - 10))
                        {
                          fprintf (dump_file, "Edge frequency estimated by "
                                   "cgraph %i diverges from inliner's estimate %i\n",
                                   edge_freq,
                                   edge->frequency);
                          fprintf (dump_file,
                                   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
                                   bb->index,
                                   bb->frequency,
                                   copy_basic_block->frequency);
                        }
                      stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
                    }
                  break;

                case CB_CGE_MOVE_CLONES:
                  cgraph_set_call_stmt_including_clones (id->dst_node,
                                                         orig_stmt, stmt);
                  edge = cgraph_edge (id->dst_node, stmt);
                  break;

                case CB_CGE_MOVE:
                  edge = cgraph_edge (id->dst_node, orig_stmt);
                  if (edge)
                    cgraph_set_call_stmt (edge, stmt);
                  break;

                default:
                  gcc_unreachable ();
                }

              /* Constant propagation on arguments done during inlining
                 may create a new direct call.  Produce an edge for it.  */
              if ((!edge
                   || (edge->indirect_inlining_edge
                       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
                  && (fn = gimple_call_fndecl (stmt)) != NULL)
                {
                  struct cgraph_node *dest = cgraph_node (fn);

                  /* We have a missing edge in the callgraph.  This can happen
                     when previous inlining turned an indirect call into a
                     direct call by constant propagating arguments or we are
                     producing a dead clone (for further cloning).  In all
                     other cases we hit a bug (incorrect node sharing is the
                     most common reason for missing edges).  */
                  gcc_assert (dest->needed || !dest->analyzed
                              || dest->address_taken
                              || !id->src_node->analyzed);
                  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
                    cgraph_create_edge_including_clones
                      (id->dst_node, dest, orig_stmt, stmt, bb->count,
                       compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                       copy_basic_block),
                       bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
                  else
                    cgraph_create_edge (id->dst_node, dest, stmt,
                                        bb->count,
                                        compute_call_stmt_bb_frequency
                                          (id->dst_node->decl, copy_basic_block),
                                        bb->loop_depth)->inline_failed
                      = CIF_ORIGINALLY_INDIRECT_CALL;
                  if (dump_file)
                    {
                      fprintf (dump_file, "Created new direct edge to %s\n",
                               cgraph_node_name (dest));
                    }
                }

              flags = gimple_call_flags (stmt);
              if (flags & ECF_MAY_BE_ALLOCA)
                cfun->calls_alloca = true;
              if (flags & ECF_RETURNS_TWICE)
                cfun->calls_setjmp = true;
            }

          maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
                                      id->eh_map, id->eh_lp_nr);

          if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
            {
              ssa_op_iter i;
              tree def;

              find_new_referenced_vars (gsi_stmt (copy_gsi));
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                if (TREE_CODE (def) == SSA_NAME)
                  SSA_NAME_DEF_STMT (def) = stmt;
            }

          gsi_next (&copy_gsi);
        }
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}
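/* Scaling sketch (illustrative numbers): with REG_BR_PROB_BASE 10000,
   a callee block profiled with count 600 copied at a call site whose
   count_scale is 5000 receives

       count = 600 * 5000 / 10000 = 300

   i.e. the callee's profile is prorated by the relative weight of
   this particular call site.  */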
/* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
   form is quite easy, since the dominator relationship for the old basic
   blocks does not change.

   There is, however, an exception: inlining might change the dominator
   relation across EH edges from basic blocks within inlined functions
   to landing pads in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges;
   we might then want to change the way we build the CFG pre-inlining
   to include all the possible edges.  */
static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
                                  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
        || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
        gimple phi;
        gimple_stmt_iterator si;

        if (!nonlocal_goto)
          gcc_assert (e->flags & EDGE_EH);

        if (!can_throw)
          gcc_assert (!(e->flags & EDGE_EH));

        for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
          {
            edge re;

            phi = gsi_stmt (si);

            /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
            gcc_assert (!e->dest->aux);

            gcc_assert ((e->flags & EDGE_EH)
                        || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

            if (!is_gimple_reg (PHI_RESULT (phi)))
              {
                mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
                continue;
              }

            re = find_edge (ret_bb, e->dest);
            gcc_assert (re);
            gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
                        == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

            SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
                     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
          }
      }
}
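/* Illustration (example only): if a copied block gains an EH edge to
   a landing pad that already exists in the caller, each PHI node in
   that landing pad needs an argument for the new edge.  For gimple
   registers the argument is copied from the corresponding edge at
   RET_BB; for memory symbols the PHI result is merely queued for SSA
   renaming, which is safe because the edges involved are abnormal.  */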
1854 /* Copy edges from BB into its copy constructed earlier, scale profile
1855 accordingly. Edges will be taken care of later. Assume aux
1856 pointers to point to the copies of each BB. Return true if any
1857 debug stmts are left after a statement that must end the basic block. */
1859 static bool
1860 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1862 basic_block new_bb = (basic_block) bb->aux;
1863 edge_iterator ei;
1864 edge old_edge;
1865 gimple_stmt_iterator si;
1866 int flags;
1867 bool need_debug_cleanup = false;
1869 /* Use the indices from the original blocks to create edges for the
1870 new ones. */
1871 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1872 if (!(old_edge->flags & EDGE_EH))
1874 edge new_edge;
1876 flags = old_edge->flags;
1878 /* Return edges get a FALLTHRU flag when they get inlined. */
1879 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1880 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1881 flags |= EDGE_FALLTHRU;
1882 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1883 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1884 new_edge->probability = old_edge->probability;
1887 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1888 return false;
1890 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1892 gimple copy_stmt;
1893 bool can_throw, nonlocal_goto;
1895 copy_stmt = gsi_stmt (si);
1896 if (!is_gimple_debug (copy_stmt))
1898 update_stmt (copy_stmt);
1899 if (gimple_in_ssa_p (cfun))
1900 mark_symbols_for_renaming (copy_stmt);
1903 /* Do this before the possible split_block. */
1904 gsi_next (&si);
1906 /* If this tree could throw an exception, there are two
1907 cases where we need to add abnormal edge(s): the
1908 tree wasn't in a region and there is a "current
1909 region" in the caller; or the original tree had
1910 EH edges. In both cases split the block after the tree,
1911 and add abnormal edge(s) as needed; we need both
1912 those from the callee and the caller.
1913 We check whether the copy can throw, because the const
1914 propagation can change an INDIRECT_REF which throws
1915 into a COMPONENT_REF which doesn't. If the copy
1916 can throw, the original could also throw. */
1917 can_throw = stmt_can_throw_internal (copy_stmt);
1918 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1920 if (can_throw || nonlocal_goto)
1922 if (!gsi_end_p (si))
1924 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1925 gsi_next (&si);
1926 if (gsi_end_p (si))
1927 need_debug_cleanup = true;
1929 if (!gsi_end_p (si))
1930 /* Note that bb's predecessor edges aren't necessarily
1931 right at this point; split_block doesn't care. */
1933 edge e = split_block (new_bb, copy_stmt);
1935 new_bb = e->dest;
1936 new_bb->aux = e->src->aux;
1937 si = gsi_start_bb (new_bb);
1941 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1942 make_eh_dispatch_edges (copy_stmt);
1943 else if (can_throw)
1944 make_eh_edges (copy_stmt);
1946 if (nonlocal_goto)
1947 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1949 if ((can_throw || nonlocal_goto)
1950 && gimple_in_ssa_p (cfun))
1951 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1952 can_throw, nonlocal_goto);
1954 return need_debug_cleanup;
1957 /* Copy the PHIs. All blocks and edges have been copied; some blocks
1958 may have been split and new outgoing EH edges inserted.
1959 BB points to the block of the original function and AUX pointers link
1960 the original and newly copied blocks. */
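/* For illustration (hypothetical GIMPLE, not from the original
   sources): an original PHI such as

     x_5 = PHI <a_2(3), b_7(4)>

   is recreated in the copied block with its result and arguments run
   through copy_tree_body_r, yielding something like

     x_9 = PHI <a_4(12), b_8(13)>

   where the new edges are found via find_edge on the copied
   predecessors. */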
1962 static void
1963 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1965 basic_block const new_bb = (basic_block) bb->aux;
1966 edge_iterator ei;
1967 gimple phi;
1968 gimple_stmt_iterator si;
1970 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1972 tree res, new_res;
1973 gimple new_phi;
1974 edge new_edge;
1976 phi = gsi_stmt (si);
1977 res = PHI_RESULT (phi);
1978 new_res = res;
1979 if (is_gimple_reg (res))
1981 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1982 SSA_NAME_DEF_STMT (new_res)
1983 = new_phi = create_phi_node (new_res, new_bb);
1984 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1986 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
1987 tree arg;
1988 tree new_arg;
1989 tree block = id->block;
1990 edge_iterator ei2;
1992 /* When doing partial cloning, we allow PHIs on the entry block
1993 as long as all the arguments are the same. Find any incoming
1994 edge to see which argument to copy.
1995 if (!old_edge)
1996 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
1997 if (!old_edge->src->aux)
1998 break;
2000 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2001 new_arg = arg;
2002 id->block = NULL_TREE;
2003 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2004 id->block = block;
2005 gcc_assert (new_arg);
2006 /* With return slot optimization we can end up with
2007 non-gimple (foo *)&this->m, fix that here. */
2008 if (TREE_CODE (new_arg) != SSA_NAME
2009 && TREE_CODE (new_arg) != FUNCTION_DECL
2010 && !is_gimple_val (new_arg))
2012 gimple_seq stmts = NULL;
2013 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2014 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
2016 add_phi_arg (new_phi, new_arg, new_edge,
2017 gimple_phi_arg_location_from_edge (phi, old_edge));
2024 /* Wrapper for remap_decl so it can be used as a callback. */
2026 static tree
2027 remap_decl_1 (tree decl, void *data)
2029 return remap_decl (decl, (copy_body_data *) data);
2032 /* Build a struct function and associated data structures for the new clone
2033 NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
2035 static void
2036 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2038 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2039 gcov_type count_scale;
2041 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2042 count_scale = (REG_BR_PROB_BASE * count
2043 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2044 else
2045 count_scale = REG_BR_PROB_BASE;
2047 /* Register specific tree functions. */
2048 gimple_register_cfg_hooks ();
2050 /* Get clean struct function. */
2051 push_struct_function (new_fndecl);
2053 /* We will rebuild these, so just sanity check that they are empty. */
2054 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2055 gcc_assert (cfun->local_decls == NULL);
2056 gcc_assert (cfun->cfg == NULL);
2057 gcc_assert (cfun->decl == new_fndecl);
2059 /* Copy items we preserve during cloning. */
2060 cfun->static_chain_decl = src_cfun->static_chain_decl;
2061 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2062 cfun->function_end_locus = src_cfun->function_end_locus;
2063 cfun->curr_properties = src_cfun->curr_properties;
2064 cfun->last_verified = src_cfun->last_verified;
2065 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2066 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2067 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2068 cfun->stdarg = src_cfun->stdarg;
2069 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
2070 cfun->after_inlining = src_cfun->after_inlining;
2071 cfun->can_throw_non_call_exceptions
2072 = src_cfun->can_throw_non_call_exceptions;
2073 cfun->returns_struct = src_cfun->returns_struct;
2074 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2075 cfun->after_tree_profile = src_cfun->after_tree_profile;
2077 init_empty_tree_cfg ();
2079 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2080 ENTRY_BLOCK_PTR->count =
2081 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2082 REG_BR_PROB_BASE);
2083 ENTRY_BLOCK_PTR->frequency
2084 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2085 EXIT_BLOCK_PTR->count =
2086 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2087 REG_BR_PROB_BASE);
2088 EXIT_BLOCK_PTR->frequency =
2089 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2090 if (src_cfun->eh)
2091 init_eh_for_function ();
2093 if (src_cfun->gimple_df)
2095 init_tree_ssa (cfun);
2096 cfun->gimple_df->in_ssa_p = true;
2097 init_ssa_operands ();
2099 pop_cfun ();
2102 /* Helper function for copy_cfg_body. Move debug stmts from the end
2103 of NEW_BB to the beginning of successor basic blocks when needed. If the
2104 successor has multiple predecessors, reset the debug stmts' values;
2105 otherwise keep them. */
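/* Illustrative sketch (hypothetical GIMPLE): if NEW_BB ends in

     foo ();   [may throw]
     # DEBUG x => x_1
     # DEBUG y => 0

   the # DEBUG binds trailing the throwing call are moved (or, for all
   but the last successor edge, copied) to the start of each successor
   block; a bind's value is reset to unknown when the successor has
   multiple predecessors, since the value need not hold on the other
   incoming paths. */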
2107 static void
2108 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2110 edge e;
2111 edge_iterator ei;
2112 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2114 if (gsi_end_p (si)
2115 || gsi_one_before_end_p (si)
2116 || !(stmt_can_throw_internal (gsi_stmt (si))
2117 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2118 return;
2120 FOR_EACH_EDGE (e, ei, new_bb->succs)
2122 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2123 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2124 while (is_gimple_debug (gsi_stmt (ssi)))
2126 gimple stmt = gsi_stmt (ssi), new_stmt;
2127 tree var;
2128 tree value;
2130 /* For the last edge move the debug stmts instead of copying
2131 them. */
2132 if (ei_one_before_end_p (ei))
2134 si = ssi;
2135 gsi_prev (&ssi);
2136 if (!single_pred_p (e->dest))
2137 gimple_debug_bind_reset_value (stmt);
2138 gsi_remove (&si, false);
2139 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2140 continue;
2143 var = gimple_debug_bind_get_var (stmt);
2144 if (single_pred_p (e->dest))
2146 value = gimple_debug_bind_get_value (stmt);
2147 value = unshare_expr (value);
2149 else
2150 value = NULL_TREE;
2151 new_stmt = gimple_build_debug_bind (var, value, stmt);
2152 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2153 VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
2154 gsi_prev (&ssi);
2159 /* Make a copy of the body of FN so that it can be inserted inline in
2160 another function. Walks FN via CFG, returns new fndecl. */
2162 static tree
2163 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2164 basic_block entry_block_map, basic_block exit_block_map,
2165 bitmap blocks_to_copy, basic_block new_entry)
2167 tree callee_fndecl = id->src_fn;
2168 /* Original cfun for the callee, doesn't change. */
2169 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2170 struct function *cfun_to_copy;
2171 basic_block bb;
2172 tree new_fndecl = NULL;
2173 bool need_debug_cleanup = false;
2174 gcov_type count_scale;
2175 int last;
2176 int incoming_frequency = 0;
2177 gcov_type incoming_count = 0;
2179 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2180 count_scale = (REG_BR_PROB_BASE * count
2181 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2182 else
2183 count_scale = REG_BR_PROB_BASE;
2185 /* Register specific tree functions. */
2186 gimple_register_cfg_hooks ();
2188 /* If we are inlining just a region of the function, make sure to connect the new entry
2189 to ENTRY_BLOCK_PTR. Since the new entry can be part of a loop, we must compute the
2190 frequency and probability of ENTRY_BLOCK_PTR based on the frequencies and
2191 probabilities of the edges incoming from the nonduplicated region.
2192 if (new_entry)
2194 edge e;
2195 edge_iterator ei;
2197 FOR_EACH_EDGE (e, ei, new_entry->preds)
2198 if (!e->src->aux)
2200 incoming_frequency += EDGE_FREQUENCY (e);
2201 incoming_count += e->count;
2203 incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
2204 incoming_frequency
2205 = incoming_frequency * frequency_scale / REG_BR_PROB_BASE;
2206 ENTRY_BLOCK_PTR->count = incoming_count;
2207 ENTRY_BLOCK_PTR->frequency = incoming_frequency;
2210 /* Must have a CFG here at this point. */
2211 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2212 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2214 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2216 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2217 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2218 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2219 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2221 /* Duplicate any exception-handling regions. */
2222 if (cfun->eh)
2223 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2224 remap_decl_1, id);
2226 /* Use aux pointers to map the original blocks to their copies. */
2227 FOR_EACH_BB_FN (bb, cfun_to_copy)
2228 if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
2230 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2231 bb->aux = new_bb;
2232 new_bb->aux = bb;
2235 last = last_basic_block;
2237 /* Now that we've duplicated the blocks, duplicate their edges. */
2238 FOR_ALL_BB_FN (bb, cfun_to_copy)
2239 if (!blocks_to_copy
2240 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2241 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
2243 if (new_entry)
2245 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2246 e->probability = REG_BR_PROB_BASE;
2247 e->count = incoming_count;
2250 if (gimple_in_ssa_p (cfun))
2251 FOR_ALL_BB_FN (bb, cfun_to_copy)
2252 if (!blocks_to_copy
2253 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2254 copy_phis_for_bb (bb, id);
2256 FOR_ALL_BB_FN (bb, cfun_to_copy)
2257 if (bb->aux)
2259 if (need_debug_cleanup
2260 && bb->index != ENTRY_BLOCK
2261 && bb->index != EXIT_BLOCK)
2262 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2263 ((basic_block)bb->aux)->aux = NULL;
2264 bb->aux = NULL;
2267 /* Zero out AUX fields of newly created blocks during EH edge
2268 insertion. */
2269 for (; last < last_basic_block; last++)
2271 if (need_debug_cleanup)
2272 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2273 BASIC_BLOCK (last)->aux = NULL;
2275 entry_block_map->aux = NULL;
2276 exit_block_map->aux = NULL;
2278 if (id->eh_map)
2280 pointer_map_destroy (id->eh_map);
2281 id->eh_map = NULL;
2284 return new_fndecl;
2287 /* Copy the debug STMT using ID. We deal with these statements in a
2288 special way: if any variable in their VALUE expression wasn't
2289 remapped yet, we won't remap it, because that would get decl uids
2290 out of sync, causing codegen differences between -g and -g0. If
2291 this arises, we drop the VALUE expression altogether. */
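/* For illustration (hypothetical GIMPLE): given a bind copied from
   the callee such as

     # DEBUG i => j_3 + 1

   the value is rewritten if j has already been remapped; if it has
   not been remapped yet, the bind is instead reset to

     # DEBUG i => NULL

   so that no decl is created on behalf of -g alone. */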
2293 static void
2294 copy_debug_stmt (gimple stmt, copy_body_data *id)
2296 tree t, *n;
2297 struct walk_stmt_info wi;
2299 t = id->block;
2300 if (gimple_block (stmt))
2302 tree *n;
2303 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2304 if (n)
2305 t = *n;
2307 gimple_set_block (stmt, t);
2309 /* Remap all the operands in COPY. */
2310 memset (&wi, 0, sizeof (wi));
2311 wi.info = id;
2313 processing_debug_stmt = 1;
2315 t = gimple_debug_bind_get_var (stmt);
2317 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2318 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2320 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2321 t = *n;
2323 else if (TREE_CODE (t) == VAR_DECL
2324 && !TREE_STATIC (t)
2325 && gimple_in_ssa_p (cfun)
2326 && !pointer_map_contains (id->decl_map, t)
2327 && !var_ann (t))
2328 /* T is a non-localized variable. */;
2329 else
2330 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2332 gimple_debug_bind_set_var (stmt, t);
2334 if (gimple_debug_bind_has_value_p (stmt))
2335 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2336 remap_gimple_op_r, &wi, NULL);
2338 /* Punt if any decl couldn't be remapped. */
2339 if (processing_debug_stmt < 0)
2340 gimple_debug_bind_reset_value (stmt);
2342 processing_debug_stmt = 0;
2344 update_stmt (stmt);
2345 if (gimple_in_ssa_p (cfun))
2346 mark_symbols_for_renaming (stmt);
2349 /* Process deferred debug stmts. In order to give values better odds
2350 of being successfully remapped, we delay the processing of debug
2351 stmts until all other stmts that might require remapping are
2352 processed. */
2354 static void
2355 copy_debug_stmts (copy_body_data *id)
2357 size_t i;
2358 gimple stmt;
2360 if (!id->debug_stmts)
2361 return;
2363 for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
2364 copy_debug_stmt (stmt, id);
2366 VEC_free (gimple, heap, id->debug_stmts);
2369 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2370 another function. */
2372 static tree
2373 copy_tree_body (copy_body_data *id)
2375 tree fndecl = id->src_fn;
2376 tree body = DECL_SAVED_TREE (fndecl);
2378 walk_tree (&body, copy_tree_body_r, id, NULL);
2380 return body;
2383 /* Make a copy of the body of FN so that it can be inserted inline in
2384 another function. */
2386 static tree
2387 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2388 basic_block entry_block_map, basic_block exit_block_map,
2389 bitmap blocks_to_copy, basic_block new_entry)
2391 tree fndecl = id->src_fn;
2392 tree body;
2394 /* If this body has a CFG, walk CFG and copy. */
2395 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2396 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2397 blocks_to_copy, new_entry);
2398 copy_debug_stmts (id);
2400 return body;
2403 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2404 defined in function FN, or of a data member thereof. */
2406 static bool
2407 self_inlining_addr_expr (tree value, tree fn)
2409 tree var;
2411 if (TREE_CODE (value) != ADDR_EXPR)
2412 return false;
2414 var = get_base_address (TREE_OPERAND (value, 0));
2416 return var && auto_var_in_fn_p (var, fn);
2419 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2420 lexical block and line number information from base_stmt, if given,
2421 or from the last stmt of the block otherwise. */
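/* A minimal sketch of the annotation produced (hypothetical GIMPLE):
   for an inlined parameter VAR initialized from VALUE, we append

     # DEBUG var => value

   to BB, so debuggers can still display the parameter even when the
   initialization itself is later optimized away. */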
2423 static gimple
2424 insert_init_debug_bind (copy_body_data *id,
2425 basic_block bb, tree var, tree value,
2426 gimple base_stmt)
2428 gimple note;
2429 gimple_stmt_iterator gsi;
2430 tree tracked_var;
2432 if (!gimple_in_ssa_p (id->src_cfun))
2433 return NULL;
2435 if (!MAY_HAVE_DEBUG_STMTS)
2436 return NULL;
2438 tracked_var = target_for_debug_bind (var);
2439 if (!tracked_var)
2440 return NULL;
2442 if (bb)
2444 gsi = gsi_last_bb (bb);
2445 if (!base_stmt && !gsi_end_p (gsi))
2446 base_stmt = gsi_stmt (gsi);
2449 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2451 if (bb)
2453 if (!gsi_end_p (gsi))
2454 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2455 else
2456 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2459 return note;
2462 static void
2463 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2465 /* If VAR represents a zero-sized variable, it's possible that the
2466 assignment statement may result in no gimple statements. */
2467 if (init_stmt)
2469 gimple_stmt_iterator si = gsi_last_bb (bb);
2471 /* We can end up with init statements that store to a non-register
2472 from a rhs with a conversion. Handle that here by forcing the
2473 rhs into a temporary. gimple_regimplify_operands is not
2474 prepared to do this for us. */
2475 if (!is_gimple_debug (init_stmt)
2476 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2477 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2478 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2480 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2481 gimple_expr_type (init_stmt),
2482 gimple_assign_rhs1 (init_stmt));
2483 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2484 GSI_NEW_STMT);
2485 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2486 gimple_assign_set_rhs1 (init_stmt, rhs);
2488 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2489 gimple_regimplify_operands (init_stmt, &si);
2490 mark_symbols_for_renaming (init_stmt);
2492 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2494 tree var, def = gimple_assign_lhs (init_stmt);
2496 if (TREE_CODE (def) == SSA_NAME)
2497 var = SSA_NAME_VAR (def);
2498 else
2499 var = def;
2501 insert_init_debug_bind (id, bb, var, def, init_stmt);
2506 /* Initialize parameter P with VALUE. If needed, produce an init statement
2507 at the end of BB. When BB is NULL, we return the init statement to be
2508 output later. */
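/* For illustration (a hypothetical example, not from the original
   sources): when inlining

     static int inc (int p) { return p + 1; }
     ... y = inc (x); ...

   the PARM_DECL p is remapped to a fresh local VAR_DECL and, unless
   the argument can be propagated directly, an init statement such as

     p.N_4 = x_2;

   is emitted at the end of the block that contained the call. */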
2509 static gimple
2510 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2511 basic_block bb, tree *vars)
2513 gimple init_stmt = NULL;
2514 tree var;
2515 tree rhs = value;
2516 tree def = (gimple_in_ssa_p (cfun)
2517 ? gimple_default_def (id->src_cfun, p) : NULL);
2519 if (value
2520 && value != error_mark_node
2521 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2523 if (fold_convertible_p (TREE_TYPE (p), value))
2524 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2525 else
2526 /* ??? For valid (GIMPLE) programs we should not end up here.
2527 Still if something has gone wrong and we end up with truly
2528 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2529 to not leak invalid GIMPLE to the following passes. */
2530 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2533 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2534 here since the type of this decl must be visible to the calling
2535 function. */
2536 var = copy_decl_to_var (p, id);
2538 /* We're actually using the newly-created var. */
2539 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2541 get_var_ann (var);
2542 add_referenced_var (var);
2545 /* Declare this new variable. */
2546 TREE_CHAIN (var) = *vars;
2547 *vars = var;
2549 /* Make gimplifier happy about this variable. */
2550 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2552 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2553 we would not need to create a new variable here at all, if it
2554 weren't for debug info. Still, we can just use the argument
2555 value. */
2556 if (TREE_READONLY (p)
2557 && !TREE_ADDRESSABLE (p)
2558 && value && !TREE_SIDE_EFFECTS (value)
2559 && !def)
2561 /* We may produce non-gimple trees by adding NOPs or introduce
2562 invalid sharing when the operand is not really constant.
2563 It is no big deal to prohibit constant propagation here, as
2564 we will constant propagate in the DOM1 pass anyway. */
2565 if (is_gimple_min_invariant (value)
2566 && useless_type_conversion_p (TREE_TYPE (p),
2567 TREE_TYPE (value))
2568 /* We have to be very careful about ADDR_EXPR. Make sure
2569 the base variable isn't a local variable of the inlined
2570 function, e.g., when doing recursive inlining, direct or
2571 mutually-recursive or whatever, which is why we don't
2572 just test whether fn == current_function_decl. */
2573 && ! self_inlining_addr_expr (value, fn))
2575 insert_decl_map (id, p, value);
2576 insert_debug_decl_map (id, p, var);
2577 return insert_init_debug_bind (id, bb, var, value, NULL);
2581 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2582 that way, when the PARM_DECL is encountered, it will be
2583 automatically replaced by the VAR_DECL. */
2584 insert_decl_map (id, p, var);
2586 /* Even if P was TREE_READONLY, the new VAR should not be.
2587 In the original code, we would have constructed a
2588 temporary, and then the function body would have never
2589 changed the value of P. However, now, we will be
2590 constructing VAR directly. The constructor body may
2591 change its value multiple times as it is being
2592 constructed. Therefore, it must not be TREE_READONLY;
2593 the back-end assumes that TREE_READONLY variable is
2594 assigned to only once. */
2595 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2596 TREE_READONLY (var) = 0;
2598 /* If there is no setup required and we are in SSA, take the easy route
2599 replacing all SSA names representing the function parameter by the
2600 SSA name passed to the function.
2602 We need to construct a map for the variable anyway, as it might be used
2603 in different SSA names when the parameter is set in the function.
2605 Do the replacement at -O0 for const arguments replaced by a constant.
2606 This is important for builtin_constant_p and other constructs requiring
2607 a constant argument to be visible in the inlined function body. */
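/* Sketch of the easy route (hypothetical SSA names): for a call
   inc (a_2) where parameter p has default definition p_1(D), we just
   record the mapping

     p_1(D) -> a_2

   so every use of p_1 in the copied body becomes a use of a_2 and no
   init statement is needed at all. */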
2608 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2609 && (optimize
2610 || (TREE_READONLY (p)
2611 && is_gimple_min_invariant (rhs)))
2612 && (TREE_CODE (rhs) == SSA_NAME
2613 || is_gimple_min_invariant (rhs))
2614 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2616 insert_decl_map (id, def, rhs);
2617 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2620 /* If the value of the argument is never used, don't bother initializing
2621 it. */
2622 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2624 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2625 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2628 /* Initialize this VAR_DECL from the equivalent argument. Convert
2629 the argument to the proper type in case it was promoted. */
2630 if (value)
2632 if (rhs == error_mark_node)
2634 insert_decl_map (id, p, var);
2635 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2638 STRIP_USELESS_TYPE_CONVERSION (rhs);
2640 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2641 keep our trees in gimple form. */
2642 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2644 def = remap_ssa_name (def, id);
2645 init_stmt = gimple_build_assign (def, rhs);
2646 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2647 set_default_def (var, NULL);
2649 else
2650 init_stmt = gimple_build_assign (var, rhs);
2652 if (bb && init_stmt)
2653 insert_init_stmt (id, bb, init_stmt);
2655 return init_stmt;
2658 /* Generate code to initialize the parameters of the function at the
2659 top of the stack in ID from the GIMPLE_CALL STMT. */
2661 static void
2662 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2663 tree fn, basic_block bb)
2665 tree parms;
2666 size_t i;
2667 tree p;
2668 tree vars = NULL_TREE;
2669 tree static_chain = gimple_call_chain (stmt);
2671 /* Figure out what the parameters are. */
2672 parms = DECL_ARGUMENTS (fn);
2674 /* Loop through the parameter declarations, replacing each with an
2675 equivalent VAR_DECL, appropriately initialized. */
2676 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2678 tree val;
2679 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2680 setup_one_parameter (id, p, val, fn, bb, &vars);
2682 /* After remapping parameters remap their types. This has to be done
2683 in a second loop over all parameters to appropriately remap
2684 variable sized arrays when the size is specified in a
2685 parameter following the array. */
2686 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2688 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2689 if (varp
2690 && TREE_CODE (*varp) == VAR_DECL)
2692 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
2693 ? gimple_default_def (id->src_cfun, p) : NULL);
2694 tree var = *varp;
2695 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
2696 /* Also remap the default definition if it was remapped
2697 to the default definition of the parameter replacement
2698 by the parameter setup. */
2699 if (def)
2701 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2702 if (defp
2703 && TREE_CODE (*defp) == SSA_NAME
2704 && SSA_NAME_VAR (*defp) == var)
2705 TREE_TYPE (*defp) = TREE_TYPE (var);
2710 /* Initialize the static chain. */
2711 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2712 gcc_assert (fn != current_function_decl);
2713 if (p)
2715 /* No static chain? Seems like a bug in tree-nested.c. */
2716 gcc_assert (static_chain);
2718 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2721 declare_inline_vars (id->block, vars);
2725 /* Declare a return variable to replace the RESULT_DECL for the
2726 function we are calling. An appropriate DECL_STMT is returned.
2727 The USE_STMT is filled to contain a use of the declaration to
2728 indicate the return value of the function.
2730 RETURN_SLOT, if non-null, is the place where to store the result. It
2731 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2732 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2734 The return value is a (possibly null) value that holds the result
2735 as seen by the caller. */
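/* For illustration (a hypothetical example): when inlining

     y = f (x);   where f ends in   return a + b;

   a caller-local variable, say retval.N, stands in for f's
   RESULT_DECL, the copied body stores into it as

     retval.N = a_1 + b_2;

   and the returned use expression lets the caller read the result,
   possibly through a type conversion back to y's type. */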
2737 static tree
2738 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest)
2740 tree callee = id->src_fn;
2741 tree caller = id->dst_fn;
2742 tree result = DECL_RESULT (callee);
2743 tree callee_type = TREE_TYPE (result);
2744 tree caller_type;
2745 tree var, use;
2747 /* Handle type-mismatches in the function declaration return type
2748 vs. the call expression. */
2749 if (modify_dest)
2750 caller_type = TREE_TYPE (modify_dest);
2751 else
2752 caller_type = TREE_TYPE (TREE_TYPE (callee));
2754 /* We don't need to do anything for functions that don't return
2755 anything. */
2756 if (!result || VOID_TYPE_P (callee_type))
2757 return NULL_TREE;
2759 /* If there was a return slot, then the return value is the
2760 dereferenced address of that object. */
2761 if (return_slot)
2763 /* The front end shouldn't have used both return_slot and
2764 a modify expression. */
2765 gcc_assert (!modify_dest);
2766 if (DECL_BY_REFERENCE (result))
2768 tree return_slot_addr = build_fold_addr_expr (return_slot);
2769 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2771 /* We are going to construct *&return_slot and we can't do that
2772 for variables believed to be not addressable.
2774 FIXME: This check possibly can match, because values returned
2775 via return slot optimization are not believed to have address
2776 taken by alias analysis. */
2777 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2778 var = return_slot_addr;
2780 else
2782 var = return_slot;
2783 gcc_assert (TREE_CODE (var) != SSA_NAME);
2784 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2786 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2787 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2788 && !DECL_GIMPLE_REG_P (result)
2789 && DECL_P (var))
2790 DECL_GIMPLE_REG_P (var) = 0;
2791 use = NULL;
2792 goto done;
2795 /* All types requiring non-trivial constructors should have been handled. */
2796 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2798 /* Attempt to avoid creating a new temporary variable. */
2799 if (modify_dest
2800 && TREE_CODE (modify_dest) != SSA_NAME)
2802 bool use_it = false;
2804 /* We can't use MODIFY_DEST if there's type promotion involved. */
2805 if (!useless_type_conversion_p (callee_type, caller_type))
2806 use_it = false;
2808 /* ??? If we're assigning to a variable sized type, then we must
2809 reuse the destination variable, because we've no good way to
2810 create variable sized temporaries at this point. */
2811 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2812 use_it = true;
2814 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2815 reuse it as the result of the call directly. Don't do this if
2816 it would promote MODIFY_DEST to addressable. */
2817 else if (TREE_ADDRESSABLE (result))
2818 use_it = false;
2819 else
2821 tree base_m = get_base_address (modify_dest);
2823 /* If the base isn't a decl, then it's a pointer, and we don't
2824 know where that's going to go. */
2825 if (!DECL_P (base_m))
2826 use_it = false;
2827 else if (is_global_var (base_m))
2828 use_it = false;
2829 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2830 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2831 && !DECL_GIMPLE_REG_P (result)
2832 && DECL_GIMPLE_REG_P (base_m))
2833 use_it = false;
2834 else if (!TREE_ADDRESSABLE (base_m))
2835 use_it = true;
2838 if (use_it)
2840 var = modify_dest;
2841 use = NULL;
2842 goto done;
2846 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2848 var = copy_result_decl_to_var (result, id);
2849 if (gimple_in_ssa_p (cfun))
2851 get_var_ann (var);
2852 add_referenced_var (var);
2855 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2856 add_local_decl (DECL_STRUCT_FUNCTION (caller), var);
2858 /* Do not have the rest of GCC warn about this variable as it should
2859 not be visible to the user. */
2860 TREE_NO_WARNING (var) = 1;
2862 declare_inline_vars (id->block, var);
2864 /* Build the use expr. If the return type of the function was
2865 promoted, convert it back to the expected type. */
2866 use = var;
2867 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2868 use = fold_convert (caller_type, var);
2870 STRIP_USELESS_TYPE_CONVERSION (use);
2872 if (DECL_BY_REFERENCE (result))
2874 TREE_ADDRESSABLE (var) = 1;
2875 var = build_fold_addr_expr (var);
2878 done:
2879 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2880 way, when the RESULT_DECL is encountered, it will be
2881 automatically replaced by the VAR_DECL. */
2882 insert_decl_map (id, result, var);
2884 /* Remember this so we can ignore it in remap_decls. */
2885 id->retvar = var;
2887 return use;
2890 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2891 to a local label. */
2893 static tree
2894 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2896 tree node = *nodep;
2897 tree fn = (tree) fnp;
2899 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2900 return node;
2902 if (TYPE_P (node))
2903 *walk_subtrees = 0;
2905 return NULL_TREE;
2908 /* Determine if the function can be copied. If so return NULL. If
2909 not return a string describing the reason for failure. */
2911 static const char *
2912 copy_forbidden (struct function *fun, tree fndecl)
2914 const char *reason = fun->cannot_be_copied_reason;
2915 tree decl;
2916 unsigned ix;
2918 /* Only examine the function once. */
2919 if (fun->cannot_be_copied_set)
2920 return reason;
2922 /* We cannot copy a function that receives a non-local goto
2923 because we cannot remap the destination label used in the
2924 function that is performing the non-local goto. */
2925 /* ??? Actually, this should be possible, if we work at it.
2926 No doubt there's just a handful of places that simply
2927 assume it doesn't happen and don't substitute properly. */
2928 if (fun->has_nonlocal_label)
2930 reason = G_("function %q+F can never be copied "
2931 "because it receives a non-local goto");
2932 goto fail;
2935 FOR_EACH_LOCAL_DECL (fun, ix, decl)
2936 if (TREE_CODE (decl) == VAR_DECL
2937 && TREE_STATIC (decl)
2938 && !DECL_EXTERNAL (decl)
2939 && DECL_INITIAL (decl)
2940 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2941 has_label_address_in_static_1,
2942 fndecl))
2944 reason = G_("function %q+F can never be copied because it saves "
2945 "address of local label in a static variable");
2946 goto fail;
2949 fail:
2950 fun->cannot_be_copied_reason = reason;
2951 fun->cannot_be_copied_set = true;
2952 return reason;
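/* Two examples of functions this rejects (illustrative GNU C only).
   The first receives a non-local goto, the second saves the address
   of a local label in a static variable:

     void f (void)
     {
       __label__ l;
       void g (void) { goto l; }
       g ();
     l:;
     }

     void h (void)
     {
     l:;
       static void *p = &&l;
     }
*/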
2956 static const char *inline_forbidden_reason;
2958 /* A callback for walk_gimple_seq to handle statements. Returns non-null
2959 iff a function cannot be inlined. Also sets the reason why. */
2961 static tree
2962 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2963 struct walk_stmt_info *wip)
2965 tree fn = (tree) wip->info;
2966 tree t;
2967 gimple stmt = gsi_stmt (*gsi);
2969 switch (gimple_code (stmt))
2971 case GIMPLE_CALL:
2972 /* Refuse to inline an alloca call unless the user explicitly forced it, as
2973 this may drastically change the program's memory overhead when the
2974 function using alloca is called in a loop. In the GCC present in
2975 SPEC2000, inlining into schedule_block caused it to require 2GB of
2976 RAM instead of 256MB. */
2977 if (gimple_alloca_call_p (stmt)
2978 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2980 inline_forbidden_reason
2981 = G_("function %q+F can never be inlined because it uses "
2982 "alloca (override using the always_inline attribute)");
2983 *handled_ops_p = true;
2984 return fn;
2987 t = gimple_call_fndecl (stmt);
2988 if (t == NULL_TREE)
2989 break;
2991 /* We cannot inline functions that call setjmp. */
2992 if (setjmp_call_p (t))
2994 inline_forbidden_reason
2995 = G_("function %q+F can never be inlined because it uses setjmp");
2996 *handled_ops_p = true;
2997 return t;
3000 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3001 switch (DECL_FUNCTION_CODE (t))
3003 /* We cannot inline functions that take a variable number of
3004 arguments. */
3005 case BUILT_IN_VA_START:
3006 case BUILT_IN_NEXT_ARG:
3007 case BUILT_IN_VA_END:
3008 inline_forbidden_reason
3009 = G_("function %q+F can never be inlined because it "
3010 "uses variable argument lists");
3011 *handled_ops_p = true;
3012 return t;
3014 case BUILT_IN_LONGJMP:
3015 /* We can't inline functions that call __builtin_longjmp at
3016 all. The non-local goto machinery really requires the
3017 destination be in a different function. If we allow the
3018 function calling __builtin_longjmp to be inlined into the
3019 function calling __builtin_setjmp, Things will Go Awry. */
3020 inline_forbidden_reason
3021 = G_("function %q+F can never be inlined because "
3022 "it uses setjmp-longjmp exception handling");
3023 *handled_ops_p = true;
3024 return t;
3026 case BUILT_IN_NONLOCAL_GOTO:
3027 /* Similarly. */
3028 inline_forbidden_reason
3029 = G_("function %q+F can never be inlined because "
3030 "it uses non-local goto");
3031 *handled_ops_p = true;
3032 return t;
3034 case BUILT_IN_RETURN:
3035 case BUILT_IN_APPLY_ARGS:
3036 /* If a __builtin_apply_args caller would be inlined,
3037 it would be saving arguments of the function it has
3038 been inlined into. Similarly __builtin_return would
3039 return from the function the inline has been inlined into. */
3040 inline_forbidden_reason
3041 = G_("function %q+F can never be inlined because "
3042 "it uses __builtin_return or __builtin_apply_args");
3043 *handled_ops_p = true;
3044 return t;
3046 default:
3047 break;
3049 break;
3051 case GIMPLE_GOTO:
3052 t = gimple_goto_dest (stmt);
3054 /* We will not inline a function which uses computed goto. The
3055 addresses of its local labels, which may be tucked into
3056 global storage, are of course not constant across
3057 instantiations, which causes unexpected behavior. */
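/* E.g. (illustrative): a function containing

     static void *tbl[] = { &&op_add, &&op_sub };
     ...
     goto *tbl[opcode];

   is refused, since &&op_add captured in one instantiation would not
   match the labels of an inlined copy. */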
3058 if (TREE_CODE (t) != LABEL_DECL)
3060 inline_forbidden_reason
3061 = G_("function %q+F can never be inlined "
3062 "because it contains a computed goto");
3063 *handled_ops_p = true;
3064 return t;
3066 break;
3068 default:
3069 break;
3072 *handled_ops_p = false;
3073 return NULL_TREE;
3076 /* Return true if FNDECL is a function that cannot be inlined into
3077 another one. */
3079 static bool
3080 inline_forbidden_p (tree fndecl)
3082 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3083 struct walk_stmt_info wi;
3084 struct pointer_set_t *visited_nodes;
3085 basic_block bb;
3086 bool forbidden_p = false;
3088 /* First check for shared reasons not to copy the code. */
3089 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3090 if (inline_forbidden_reason != NULL)
3091 return true;
3093 /* Next, walk the statements of the function looking for
3094 constructs we can't handle, or that are non-optimal for inlining. */
3095 visited_nodes = pointer_set_create ();
3096 memset (&wi, 0, sizeof (wi));
3097 wi.info = (void *) fndecl;
3098 wi.pset = visited_nodes;
3100 FOR_EACH_BB_FN (bb, fun)
3102 gimple ret;
3103 gimple_seq seq = bb_seq (bb);
3104 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3105 forbidden_p = (ret != NULL);
3106 if (forbidden_p)
3107 break;
3110 pointer_set_destroy (visited_nodes);
3111 return forbidden_p;
3114 /* Return true if CALLEE cannot be inlined into CALLER. */
3116 static bool
3117 inline_forbidden_into_p (tree caller, tree callee)
3119 /* Don't inline if the functions have different EH personalities. */
3120 if (DECL_FUNCTION_PERSONALITY (caller)
3121 && DECL_FUNCTION_PERSONALITY (callee)
3122 && (DECL_FUNCTION_PERSONALITY (caller)
3123 != DECL_FUNCTION_PERSONALITY (callee)))
3124 return true;
3126 /* Don't inline if the callee can throw non-call exceptions but the
3127 caller cannot. */
3128 if (DECL_STRUCT_FUNCTION (callee)
3129 && DECL_STRUCT_FUNCTION (callee)->can_throw_non_call_exceptions
3130 && !(DECL_STRUCT_FUNCTION (caller)
3131 && DECL_STRUCT_FUNCTION (caller)->can_throw_non_call_exceptions))
3132 return true;
3134 return false;
3137 /* Returns nonzero if FN is a function that does not have any
3138 fundamental inline blocking properties. */
3140 bool
3141 tree_inlinable_function_p (tree fn)
3143 bool inlinable = true;
3144 bool do_warning;
3145 tree always_inline;
3147 /* If we've already decided this function shouldn't be inlined,
3148 there's no need to check again. */
3149 if (DECL_UNINLINABLE (fn))
3150 return false;
3152 /* We only warn for functions declared `inline' by the user. */
3153 do_warning = (warn_inline
3154 && DECL_DECLARED_INLINE_P (fn)
3155 && !DECL_NO_INLINE_WARNING_P (fn)
3156 && !DECL_IN_SYSTEM_HEADER (fn));
3158 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3160 if (flag_no_inline
3161 && always_inline == NULL)
3163 if (do_warning)
3164 warning (OPT_Winline, "function %q+F can never be inlined because it "
3165 "is suppressed using -fno-inline", fn);
3166 inlinable = false;
3169 /* Don't auto-inline anything that might not be bound within
3170 this unit of translation. */
3171 else if (!DECL_DECLARED_INLINE_P (fn)
3172 && DECL_REPLACEABLE_P (fn))
3173 inlinable = false;
3175 else if (!function_attribute_inlinable_p (fn))
3177 if (do_warning)
3178 warning (OPT_Winline, "function %q+F can never be inlined because it "
3179 "uses attributes conflicting with inlining", fn);
3180 inlinable = false;
3183 else if (inline_forbidden_p (fn))
3185 /* See if we should warn about uninlinable functions. Previously,
3186 some of these warnings would be issued while trying to expand
3187 the function inline, but that would cause multiple warnings
3188 about functions that would for example call alloca. But since
3189 this is a property of the function, just one warning is enough.
3190 As a bonus we can now give more details about the reason why a
3191 function is not inlinable. */
3192 if (always_inline)
3193 sorry (inline_forbidden_reason, fn);
3194 else if (do_warning)
3195 warning (OPT_Winline, inline_forbidden_reason, fn);
3197 inlinable = false;
3200 /* Squirrel away the result so that we don't have to check again. */
3201 DECL_UNINLINABLE (fn) = !inlinable;
3203 return inlinable;
3206 /* Estimate the cost of a memory move. Use the machine-dependent
3207 word size and take a possible memcpy call into account. */
3209 static int
3210 estimate_move_cost (tree type)
3212 HOST_WIDE_INT size;
3214 gcc_assert (!VOID_TYPE_P (type));
3216 size = int_size_in_bytes (type);
3218 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3219 /* Cost of a memcpy call, 3 arguments and the call. */
3220 return 4;
3221 else
3222 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
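/* Worked example (with illustrative target values): if
   MOVE_MAX_PIECES == 8 and MOVE_RATIO (!optimize_size) == 4, a
   16-byte struct costs (16 + 8 - 1) / 8 == 2, while a 64-byte struct
   exceeds the 8 * 4 == 32 byte threshold and is costed as a memcpy
   call, i.e. 4. */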
3225 /* Returns the cost of operation CODE, according to WEIGHTS. */
3227 static int
3228 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3229 tree op1 ATTRIBUTE_UNUSED, tree op2)
3231 switch (code)
3233 /* These are "free" conversions, or their presumed cost
3234 is folded into other operations. */
3235 case RANGE_EXPR:
3236 CASE_CONVERT:
3237 case COMPLEX_EXPR:
3238 case PAREN_EXPR:
3239 return 0;
3241 /* Assign cost of 1 to usual operations.
3242 ??? We may consider mapping RTL costs to this. */
3243 case COND_EXPR:
3244 case VEC_COND_EXPR:
3246 case PLUS_EXPR:
3247 case POINTER_PLUS_EXPR:
3248 case MINUS_EXPR:
3249 case MULT_EXPR:
3251 case ADDR_SPACE_CONVERT_EXPR:
3252 case FIXED_CONVERT_EXPR:
3253 case FIX_TRUNC_EXPR:
3255 case NEGATE_EXPR:
3256 case FLOAT_EXPR:
3257 case MIN_EXPR:
3258 case MAX_EXPR:
3259 case ABS_EXPR:
3261 case LSHIFT_EXPR:
3262 case RSHIFT_EXPR:
3263 case LROTATE_EXPR:
3264 case RROTATE_EXPR:
3265 case VEC_LSHIFT_EXPR:
3266 case VEC_RSHIFT_EXPR:
3268 case BIT_IOR_EXPR:
3269 case BIT_XOR_EXPR:
3270 case BIT_AND_EXPR:
3271 case BIT_NOT_EXPR:
3273 case TRUTH_ANDIF_EXPR:
3274 case TRUTH_ORIF_EXPR:
3275 case TRUTH_AND_EXPR:
3276 case TRUTH_OR_EXPR:
3277 case TRUTH_XOR_EXPR:
3278 case TRUTH_NOT_EXPR:
3280 case LT_EXPR:
3281 case LE_EXPR:
3282 case GT_EXPR:
3283 case GE_EXPR:
3284 case EQ_EXPR:
3285 case NE_EXPR:
3286 case ORDERED_EXPR:
3287 case UNORDERED_EXPR:
3289 case UNLT_EXPR:
3290 case UNLE_EXPR:
3291 case UNGT_EXPR:
3292 case UNGE_EXPR:
3293 case UNEQ_EXPR:
3294 case LTGT_EXPR:
3296 case CONJ_EXPR:
3298 case PREDECREMENT_EXPR:
3299 case PREINCREMENT_EXPR:
3300 case POSTDECREMENT_EXPR:
3301 case POSTINCREMENT_EXPR:
3303 case REALIGN_LOAD_EXPR:
3305 case REDUC_MAX_EXPR:
3306 case REDUC_MIN_EXPR:
3307 case REDUC_PLUS_EXPR:
3308 case WIDEN_SUM_EXPR:
3309 case WIDEN_MULT_EXPR:
3310 case DOT_PROD_EXPR:
3311 case WIDEN_MULT_PLUS_EXPR:
3312 case WIDEN_MULT_MINUS_EXPR:
3314 case VEC_WIDEN_MULT_HI_EXPR:
3315 case VEC_WIDEN_MULT_LO_EXPR:
3316 case VEC_UNPACK_HI_EXPR:
3317 case VEC_UNPACK_LO_EXPR:
3318 case VEC_UNPACK_FLOAT_HI_EXPR:
3319 case VEC_UNPACK_FLOAT_LO_EXPR:
3320 case VEC_PACK_TRUNC_EXPR:
3321 case VEC_PACK_SAT_EXPR:
3322 case VEC_PACK_FIX_TRUNC_EXPR:
3323 case VEC_EXTRACT_EVEN_EXPR:
3324 case VEC_EXTRACT_ODD_EXPR:
3325 case VEC_INTERLEAVE_HIGH_EXPR:
3326 case VEC_INTERLEAVE_LOW_EXPR:
3328 return 1;
3330 /* A few special cases of expensive operations. This is useful
3331 to avoid inlining functions having too many of these. */
3332 case TRUNC_DIV_EXPR:
3333 case CEIL_DIV_EXPR:
3334 case FLOOR_DIV_EXPR:
3335 case ROUND_DIV_EXPR:
3336 case EXACT_DIV_EXPR:
3337 case TRUNC_MOD_EXPR:
3338 case CEIL_MOD_EXPR:
3339 case FLOOR_MOD_EXPR:
3340 case ROUND_MOD_EXPR:
3341 case RDIV_EXPR:
3342 if (TREE_CODE (op2) != INTEGER_CST)
3343 return weights->div_mod_cost;
3344 return 1;
3346 default:
3347 /* We expect a copy assignment with no operator. */
3348 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3349 return 0;
3354 /* Estimate number of instructions that will be created by expanding
3355 the statements in the statement sequence STMTS.
3356 WEIGHTS contains weights attributed to various constructs. */
3358 static
3359 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3361 int cost;
3362 gimple_stmt_iterator gsi;
3364 cost = 0;
3365 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3366 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3368 return cost;
3372 /* Estimate number of instructions that will be created by expanding STMT.
3373 WEIGHTS contains weights attributed to various constructs. */
3375 int
3376 estimate_num_insns (gimple stmt, eni_weights *weights)
3378 unsigned cost, i;
3379 enum gimple_code code = gimple_code (stmt);
3380 tree lhs;
3381 tree rhs;
3383 switch (code)
3385 case GIMPLE_ASSIGN:
3386 /* Try to estimate the cost of assignments. We have three cases to
3387 deal with:
3388 1) Simple assignments to registers;
3389 2) Stores to things that must live in memory. This includes
3390 "normal" stores to scalars, but also assignments of large
3391 structures, or constructors of big arrays;
3393 Let us look at the first two cases, assuming we have "a = b + C":
3394 <GIMPLE_ASSIGN <var_decl "a">
3395 <plus_expr <var_decl "b"> <constant C>>
3396 If "a" is a GIMPLE register, the assignment to it is free on almost
3397 any target, because "a" usually ends up in a real register. Hence
3398 the only cost of this expression comes from the PLUS_EXPR, and we
3399 can ignore the GIMPLE_ASSIGN.
3400 If "a" is not a GIMPLE register, the assignment to "a" will most
3401 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3402 of moving something into "a", which we compute using the function
3403 estimate_move_cost. */
3404 lhs = gimple_assign_lhs (stmt);
3405 rhs = gimple_assign_rhs1 (stmt);
3407 if (is_gimple_reg (lhs))
3408 cost = 0;
3409 else
3410 cost = estimate_move_cost (TREE_TYPE (lhs));
3412 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3413 cost += estimate_move_cost (TREE_TYPE (rhs));
3415 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3416 gimple_assign_rhs1 (stmt),
3417 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3418 == GIMPLE_BINARY_RHS
3419 ? gimple_assign_rhs2 (stmt) : NULL);
3420 break;
3422 case GIMPLE_COND:
3423 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3424 gimple_op (stmt, 0),
3425 gimple_op (stmt, 1));
3426 break;
3428 case GIMPLE_SWITCH:
3429 /* Take into account the cost of the switch + guess 2 conditional jumps for
3430 each case label.
3432 TODO: once the switch expansion logic is sufficiently separated, we can
3433 do a better job of estimating the cost of the switch. */
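/* E.g. (illustrative): a switch with 16 case labels is costed as
   16 * 2 == 32 for size, but only floor_log2 (16) * 2 == 8 for time,
   reflecting the balanced decision tree the expander typically
   emits. */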
3434 if (weights->time_based)
3435 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3436 else
3437 cost = gimple_switch_num_labels (stmt) * 2;
3438 break;
3440 case GIMPLE_CALL:
3442 tree decl = gimple_call_fndecl (stmt);
3443 tree addr = gimple_call_fn (stmt);
3444 tree funtype = TREE_TYPE (addr);
3445 bool stdarg = false;
3447 if (POINTER_TYPE_P (funtype))
3448 funtype = TREE_TYPE (funtype);
3450 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3451 cost = weights->target_builtin_call_cost;
3452 else
3453 cost = weights->call_cost;
3455 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3456 switch (DECL_FUNCTION_CODE (decl))
3458 /* Builtins that expand to constants. */
3459 case BUILT_IN_CONSTANT_P:
3460 case BUILT_IN_EXPECT:
3461 case BUILT_IN_OBJECT_SIZE:
3462 case BUILT_IN_UNREACHABLE:
3463 /* Simple register moves or loads from stack. */
3464 case BUILT_IN_RETURN_ADDRESS:
3465 case BUILT_IN_EXTRACT_RETURN_ADDR:
3466 case BUILT_IN_FROB_RETURN_ADDR:
3467 case BUILT_IN_RETURN:
3468 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
3469 case BUILT_IN_FRAME_ADDRESS:
3470 case BUILT_IN_VA_END:
3471 case BUILT_IN_STACK_SAVE:
3472 case BUILT_IN_STACK_RESTORE:
3473 /* Exception state returns or moves registers around. */
3474 case BUILT_IN_EH_FILTER:
3475 case BUILT_IN_EH_POINTER:
3476 case BUILT_IN_EH_COPY_VALUES:
3477 return 0;
3479 /* Builtins that are not expensive (that is, they are most probably
3480 expanded inline into reasonably simple code). */
3481 case BUILT_IN_ABS:
3482 case BUILT_IN_ALLOCA:
3483 case BUILT_IN_BSWAP32:
3484 case BUILT_IN_BSWAP64:
3485 case BUILT_IN_CLZ:
3486 case BUILT_IN_CLZIMAX:
3487 case BUILT_IN_CLZL:
3488 case BUILT_IN_CLZLL:
3489 case BUILT_IN_CTZ:
3490 case BUILT_IN_CTZIMAX:
3491 case BUILT_IN_CTZL:
3492 case BUILT_IN_CTZLL:
3493 case BUILT_IN_FFS:
3494 case BUILT_IN_FFSIMAX:
3495 case BUILT_IN_FFSL:
3496 case BUILT_IN_FFSLL:
3497 case BUILT_IN_IMAXABS:
3498 case BUILT_IN_FINITE:
3499 case BUILT_IN_FINITEF:
3500 case BUILT_IN_FINITEL:
3501 case BUILT_IN_FINITED32:
3502 case BUILT_IN_FINITED64:
3503 case BUILT_IN_FINITED128:
3504 case BUILT_IN_FPCLASSIFY:
3505 case BUILT_IN_ISFINITE:
3506 case BUILT_IN_ISINF_SIGN:
3507 case BUILT_IN_ISINF:
3508 case BUILT_IN_ISINFF:
3509 case BUILT_IN_ISINFL:
3510 case BUILT_IN_ISINFD32:
3511 case BUILT_IN_ISINFD64:
3512 case BUILT_IN_ISINFD128:
3513 case BUILT_IN_ISNAN:
3514 case BUILT_IN_ISNANF:
3515 case BUILT_IN_ISNANL:
3516 case BUILT_IN_ISNAND32:
3517 case BUILT_IN_ISNAND64:
3518 case BUILT_IN_ISNAND128:
3519 case BUILT_IN_ISNORMAL:
3520 case BUILT_IN_ISGREATER:
3521 case BUILT_IN_ISGREATEREQUAL:
3522 case BUILT_IN_ISLESS:
3523 case BUILT_IN_ISLESSEQUAL:
3524 case BUILT_IN_ISLESSGREATER:
3525 case BUILT_IN_ISUNORDERED:
3526 case BUILT_IN_VA_ARG_PACK:
3527 case BUILT_IN_VA_ARG_PACK_LEN:
3528 case BUILT_IN_VA_COPY:
3529 case BUILT_IN_TRAP:
3530 case BUILT_IN_SAVEREGS:
3531 case BUILT_IN_POPCOUNTL:
3532 case BUILT_IN_POPCOUNTLL:
3533 case BUILT_IN_POPCOUNTIMAX:
3534 case BUILT_IN_POPCOUNT:
3535 case BUILT_IN_PARITYL:
3536 case BUILT_IN_PARITYLL:
3537 case BUILT_IN_PARITYIMAX:
3538 case BUILT_IN_PARITY:
3539 case BUILT_IN_LABS:
3540 case BUILT_IN_LLABS:
3541 case BUILT_IN_PREFETCH:
3542 cost = weights->target_builtin_call_cost;
3543 break;
3545 default:
3546 break;
3549 if (decl)
3550 funtype = TREE_TYPE (decl);
3552 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3553 cost += estimate_move_cost (TREE_TYPE (funtype));
3555 if (funtype)
3556 stdarg = stdarg_p (funtype);
3558 /* Our cost must be kept in sync with
3559 cgraph_estimate_size_after_inlining, which does use the function
3560 declaration to figure out the arguments.
3562 For functions taking a variable list of arguments we must
3563 look into the call statement itself. This is safe because
3564 we will get only higher costs and in most cases we will
3565 not inline these anyway. */
3566 if (decl && DECL_ARGUMENTS (decl) && !stdarg)
3568 tree arg;
3569 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3570 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3571 cost += estimate_move_cost (TREE_TYPE (arg));
3573 else if (funtype && prototype_p (funtype) && !stdarg)
3575 tree t;
3576 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3577 t = TREE_CHAIN (t))
3578 if (!VOID_TYPE_P (TREE_VALUE (t)))
3579 cost += estimate_move_cost (TREE_VALUE (t));
3581 else
3583 for (i = 0; i < gimple_call_num_args (stmt); i++)
3585 tree arg = gimple_call_arg (stmt, i);
3586 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3587 cost += estimate_move_cost (TREE_TYPE (arg));
3591 break;
3594 case GIMPLE_GOTO:
3595 case GIMPLE_LABEL:
3596 case GIMPLE_NOP:
3597 case GIMPLE_PHI:
3598 case GIMPLE_RETURN:
3599 case GIMPLE_PREDICT:
3600 case GIMPLE_DEBUG:
3601 return 0;
3603 case GIMPLE_ASM:
3604 return asm_str_count (gimple_asm_string (stmt));
3606 case GIMPLE_RESX:
3607 /* This is either going to be an external function call with one
3608 argument, or two register copy statements plus a goto. */
3609 return 2;
3611 case GIMPLE_EH_DISPATCH:
3612 /* ??? This is going to turn into a switch statement. Ideally
3613 we'd have a look at the eh region and estimate the number of
3614 edges involved. */
3615 return 10;
3617 case GIMPLE_BIND:
3618 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3620 case GIMPLE_EH_FILTER:
3621 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3623 case GIMPLE_CATCH:
3624 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3626 case GIMPLE_TRY:
3627 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3628 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3630 /* OpenMP directives are generally very expensive. */
3632 case GIMPLE_OMP_RETURN:
3633 case GIMPLE_OMP_SECTIONS_SWITCH:
3634 case GIMPLE_OMP_ATOMIC_STORE:
3635 case GIMPLE_OMP_CONTINUE:
3636 /* ...except these, which are cheap. */
3637 return 0;
3639 case GIMPLE_OMP_ATOMIC_LOAD:
3640 return weights->omp_cost;
3642 case GIMPLE_OMP_FOR:
3643 return (weights->omp_cost
3644 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3645 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3647 case GIMPLE_OMP_PARALLEL:
3648 case GIMPLE_OMP_TASK:
3649 case GIMPLE_OMP_CRITICAL:
3650 case GIMPLE_OMP_MASTER:
3651 case GIMPLE_OMP_ORDERED:
3652 case GIMPLE_OMP_SECTION:
3653 case GIMPLE_OMP_SECTIONS:
3654 case GIMPLE_OMP_SINGLE:
3655 return (weights->omp_cost
3656 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3658 default:
3659 gcc_unreachable ();
3662 return cost;
3665 /* Estimate number of instructions that will be created by expanding
3666 function FNDECL. WEIGHTS contains weights attributed to various
3667 constructs. */
3669 int
3670 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3672 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3673 gimple_stmt_iterator bsi;
3674 basic_block bb;
3675 int n = 0;
3677 gcc_assert (my_function && my_function->cfg);
3678 FOR_EACH_BB_FN (bb, my_function)
3680 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3681 n += estimate_num_insns (gsi_stmt (bsi), weights);
3684 return n;
3688 /* Initializes weights used by estimate_num_insns. */
3690 void
3691 init_inline_once (void)
3693 eni_size_weights.call_cost = 1;
3694 eni_size_weights.target_builtin_call_cost = 1;
3695 eni_size_weights.div_mod_cost = 1;
3696 eni_size_weights.omp_cost = 40;
3697 eni_size_weights.time_based = false;
3699 /* Estimating time for call is difficult, since we have no idea what the
3700 called function does. In the current uses of eni_time_weights,
3701 underestimating the cost does less harm than overestimating it, so
3702 we choose a rather small value here. */
3703 eni_time_weights.call_cost = 10;
3704 eni_time_weights.target_builtin_call_cost = 10;
3705 eni_time_weights.div_mod_cost = 10;
3706 eni_time_weights.omp_cost = 40;
3707 eni_time_weights.time_based = true;
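/* An illustrative consequence of the weights above: a division by a
   non-constant counts as 1 for size estimates but 10 for time
   estimates, so time-based heuristics penalize div/mod-heavy callees
   much harder than the size-based ones do. */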
3710 /* Estimate the number of instructions in a gimple_seq. */
3712 int
3713 count_insns_seq (gimple_seq seq, eni_weights *weights)
3715 gimple_stmt_iterator gsi;
3716 int n = 0;
3717 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3718 n += estimate_num_insns (gsi_stmt (gsi), weights);
3720 return n;
3724 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3726 static void
3727 prepend_lexical_block (tree current_block, tree new_block)
3729 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3730 BLOCK_SUBBLOCKS (current_block) = new_block;
3731 BLOCK_SUPERCONTEXT (new_block) = current_block;
3734 /* Add local variables from CALLEE to CALLER. */
3736 static inline void
3737 add_local_variables (struct function *callee, struct function *caller,
3738 copy_body_data *id, bool check_var_ann)
3740 tree var;
3741 unsigned ix;
3743 FOR_EACH_LOCAL_DECL (callee, ix, var)
3744 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3746 if (!check_var_ann
3747 || (var_ann (var) && add_referenced_var (var)))
3748 add_local_decl (caller, var);
3750 else if (!can_be_nonlocal (var, id))
3751 add_local_decl (caller, remap_decl (var, id));
3754 /* Fetch the callee declaration from the call graph edge going from NODE and
3755 associated with call statement STMT. Return NULL_TREE if not found. */
3756 static tree
3757 get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3759 struct cgraph_edge *cs;
3761 cs = cgraph_edge (node, stmt);
3762 if (cs && !cs->indirect_unknown_callee)
3763 return cs->callee->decl;
3765 return NULL_TREE;
3768 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
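/* In outline (an illustrative sketch of the steps below, not a
   specification): a call

     a = foo (x);

   is expanded by splitting the block at the call, initializing foo's
   parameters from the actual arguments, copying foo's body between
   the two block halves with returns redirected to the second half,
   and finally replacing the GIMPLE_CALL with

     a = USE_RETVAR;

   where USE_RETVAR is set up by declare_return_variable.  */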
3770 static bool
3771 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3773 tree use_retvar;
3774 tree fn;
3775 struct pointer_map_t *st, *dst;
3776 tree return_slot;
3777 tree modify_dest;
3778 location_t saved_location;
3779 struct cgraph_edge *cg_edge;
3780 cgraph_inline_failed_t reason;
3781 basic_block return_block;
3782 edge e;
3783 gimple_stmt_iterator gsi, stmt_gsi;
3784 bool successfully_inlined = FALSE;
3785 bool purge_dead_abnormal_edges;
3787 /* Set input_location here so we get the right instantiation context
3788 if we call instantiate_decl from inlinable_function_p. */
3789 saved_location = input_location;
3790 if (gimple_has_location (stmt))
3791 input_location = gimple_location (stmt);
3793 /* From here on, we're only interested in CALL_EXPRs. */
3794 if (gimple_code (stmt) != GIMPLE_CALL)
3795 goto egress;
3797 /* First, see if we can figure out what function is being called.
3798 If we cannot, then there is no hope of inlining the function. */
3799 fn = gimple_call_fndecl (stmt);
3800 if (!fn)
3802 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3803 if (!fn)
3804 goto egress;
3807 /* Turn forward declarations into real ones. */
3808 fn = cgraph_node (fn)->decl;
3810 /* If FN is a declaration of a function in a nested scope that was
3811 globally declared inline, we don't set its DECL_INITIAL.
3812 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3813 C++ front-end uses it for cdtors to refer to their internal
3814 declarations, which are not real functions. Fortunately those
3815 don't have trees to be saved, so we can tell by checking their
3816 gimple_body. */
3817 if (!DECL_INITIAL (fn)
3818 && DECL_ABSTRACT_ORIGIN (fn)
3819 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3820 fn = DECL_ABSTRACT_ORIGIN (fn);
3822 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
3823 Kill this check once this is fixed. */
3824 if (!id->dst_node->analyzed)
3825 goto egress;
3827 cg_edge = cgraph_edge (id->dst_node, stmt);
3829 /* First check that inlining isn't simply forbidden in this case. */
3830 if (inline_forbidden_into_p (cg_edge->caller->decl, cg_edge->callee->decl))
3831 goto egress;
3833 /* Don't try to inline functions that are not well-suited to inlining. */
3834 if (!cgraph_inline_p (cg_edge, &reason))
3836 /* If this call was originally indirect, we do not want to emit any
3837 inlining related warnings or sorry messages because there are no
3838 guarantees regarding those. */
3839 if (cg_edge->indirect_inlining_edge)
3840 goto egress;
3842 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3843 /* Avoid warnings during early inline pass. */
3844 && cgraph_global_info_ready)
3846 sorry ("inlining failed in call to %q+F: %s", fn,
3847 _(cgraph_inline_failed_string (reason)));
3848 sorry ("called from here");
3850 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3851 && !DECL_IN_SYSTEM_HEADER (fn)
3852 && reason != CIF_UNSPECIFIED
3853 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3854 /* Avoid warnings during early inline pass. */
3855 && cgraph_global_info_ready)
3857 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3858 fn, _(cgraph_inline_failed_string (reason)));
3859 warning (OPT_Winline, "called from here");
3861 goto egress;
3863 fn = cg_edge->callee->decl;
3865 #ifdef ENABLE_CHECKING
3866 if (cg_edge->callee->decl != id->dst_node->decl)
3867 verify_cgraph_node (cg_edge->callee);
3868 #endif
3870 /* We will be inlining this callee. */
3871 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3873 /* Update the caller's EH personality. */
3874 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3875 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3876 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3878 /* Split the block holding the GIMPLE_CALL. */
3879 e = split_block (bb, stmt);
3880 bb = e->src;
3881 return_block = e->dest;
3882 remove_edge (e);
3884 /* split_block splits after the statement; work around this by
3885 moving the call into the second block manually. Not pretty,
3886 but seems easier than doing the CFG manipulation by hand
3887 when the GIMPLE_CALL is the last statement of BB. */
3888 stmt_gsi = gsi_last_bb (bb);
3889 gsi_remove (&stmt_gsi, false);
3891 /* If the GIMPLE_CALL was the last statement of BB, it may have
3892 been the source of abnormal edges. In that case, schedule
3893 the removal of dead abnormal edges. */
3894 gsi = gsi_start_bb (return_block);
3895 if (gsi_end_p (gsi))
3897 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3898 purge_dead_abnormal_edges = true;
3900 else
3902 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3903 purge_dead_abnormal_edges = false;
3906 stmt_gsi = gsi_start_bb (return_block);
3908 /* Build a block containing code to initialize the arguments, the
3909 actual inline expansion of the body, and a label for the return
3910 statements within the function to jump to. The type of the
3911 statement expression is the return type of the function call. */
3912 id->block = make_node (BLOCK);
3913 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3914 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3915 prepend_lexical_block (gimple_block (stmt), id->block);
3917 /* Local declarations will be replaced by their equivalents in this
3918 map. */
3919 st = id->decl_map;
3920 id->decl_map = pointer_map_create ();
3921 dst = id->debug_map;
3922 id->debug_map = NULL;
3924 /* Record the function we are about to inline. */
3925 id->src_fn = fn;
3926 id->src_node = cg_edge->callee;
3927 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3928 id->gimple_call = stmt;
3930 gcc_assert (!id->src_cfun->after_inlining);
3932 id->entry_bb = bb;
3933 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3935 gimple_stmt_iterator si = gsi_last_bb (bb);
3936 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3937 NOT_TAKEN),
3938 GSI_NEW_STMT);
3940 initialize_inlined_parameters (id, stmt, fn, bb);
3942 if (DECL_INITIAL (fn))
3943 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3945 /* Return statements in the function body will be replaced by jumps
3946 to the RET_LABEL. */
3947 gcc_assert (DECL_INITIAL (fn));
3948 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3950 /* Find the LHS to which the result of this call is assigned. */
3951 return_slot = NULL;
3952 if (gimple_call_lhs (stmt))
3954 modify_dest = gimple_call_lhs (stmt);
3956 /* The function which we are inlining might not return a value,
3957 in which case we should issue a warning that the function
3958 does not return a value. In that case the optimizers will
3959 see that the variable to which the value is assigned was not
3960 initialized. We do not want to issue a warning about that
3961 uninitialized variable. */
3962 if (DECL_P (modify_dest))
3963 TREE_NO_WARNING (modify_dest) = 1;
3965 if (gimple_call_return_slot_opt_p (stmt))
3967 return_slot = modify_dest;
3968 modify_dest = NULL;
3971 else
3972 modify_dest = NULL;
3974 /* If we are inlining a call to the C++ operator new, we don't want
3975 to use type based alias analysis on the return value. Otherwise
3976 we may get confused if the compiler sees that the inlined new
3977 function returns a pointer which was just deleted. See bug
3978 33407. */
3979 if (DECL_IS_OPERATOR_NEW (fn))
3981 return_slot = NULL;
3982 modify_dest = NULL;
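/* Hypothetical illustration of the problem described above: if an
   inlined operator new returns storage that a preceding delete just
   freed, applying type-based rules to the returned pointer could let
   the optimizer wrongly disambiguate the old and the new object.
   PR 33407 has the original testcase; this sketch is not taken from
   it.  */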
3985 /* Declare the return variable for the function. */
3986 use_retvar = declare_return_variable (id, return_slot, modify_dest);
3988 /* Add local vars in this inlined callee to caller. */
3989 add_local_variables (id->src_cfun, cfun, id, true);
3991 if (dump_file && (dump_flags & TDF_DETAILS))
3993 fprintf (dump_file, "Inlining ");
3994 print_generic_expr (dump_file, id->src_fn, 0);
3995 fprintf (dump_file, " to ");
3996 print_generic_expr (dump_file, id->dst_fn, 0);
3997 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4000 /* This is it. Duplicate the callee body. Assume callee is
4001 pre-gimplified. Note that we must not alter the caller
4002 function in any way before this point, as this CALL_EXPR may be
4003 a self-referential call; if we're calling ourselves, we need to
4004 duplicate our body before altering anything. */
4005 copy_body (id, bb->count,
4006 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
4007 bb, return_block, NULL, NULL);
4009 /* Reset the escaped solution. */
4010 if (cfun->gimple_df)
4011 pt_solution_reset (&cfun->gimple_df->escaped);
4013 /* Clean up. */
4014 if (id->debug_map)
4016 pointer_map_destroy (id->debug_map);
4017 id->debug_map = dst;
4019 pointer_map_destroy (id->decl_map);
4020 id->decl_map = st;
4022 /* Unlink the call's virtual operands before replacing the statement. */
4023 unlink_stmt_vdef (stmt);
4025 /* If the inlined function returns a result that we care about,
4026 substitute the GIMPLE_CALL with an assignment of the return
4027 variable to the LHS of the call. That is, if STMT was
4028 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4029 if (use_retvar && gimple_call_lhs (stmt))
4031 gimple old_stmt = stmt;
4032 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4033 gsi_replace (&stmt_gsi, stmt, false);
4034 if (gimple_in_ssa_p (cfun))
4035 mark_symbols_for_renaming (stmt);
4036 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4038 else
4040 /* Handle the case of inlining a function with no return
4041 statement, which causes the return value to become undefined. */
4042 if (gimple_call_lhs (stmt)
4043 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4045 tree name = gimple_call_lhs (stmt);
4046 tree var = SSA_NAME_VAR (name);
4047 tree def = gimple_default_def (cfun, var);
4049 if (def)
4051 /* If the variable is used undefined, make this name
4052 undefined via a move. */
4053 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4054 gsi_replace (&stmt_gsi, stmt, true);
4056 else
4058 /* Otherwise make this variable undefined. */
4059 gsi_remove (&stmt_gsi, true);
4060 set_default_def (var, name);
4061 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4064 else
4065 gsi_remove (&stmt_gsi, true);
4068 if (purge_dead_abnormal_edges)
4069 gimple_purge_dead_abnormal_call_edges (return_block);
4071 /* If the value of the new expression is ignored, that's OK. We
4072 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4073 the equivalent inlined version either. */
4074 if (is_gimple_assign (stmt))
4076 gcc_assert (gimple_assign_single_p (stmt)
4077 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4078 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4081 /* Output the inlining info for this abstract function, since it has been
4082 inlined. If we don't do this now, we can lose the information about the
4083 variables in the function when the blocks get blown away as soon as we
4084 remove the cgraph node. */
4085 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4087 /* Update callgraph if needed. */
4088 cgraph_remove_node (cg_edge->callee);
4090 id->block = NULL_TREE;
4091 successfully_inlined = TRUE;
4093 egress:
4094 input_location = saved_location;
4095 return successfully_inlined;
4098 /* Expand call statements reachable from STMT_P.
4099 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4100 in a MODIFY_EXPR. See gimple.c:get_call_expr_in(). Unfortunately we
4101 cannot use that function here because we need a pointer to the
4102 CALL_EXPR, not the tree itself. */
4104 static bool
4105 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4107 gimple_stmt_iterator gsi;
4109 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4111 gimple stmt = gsi_stmt (gsi);
4113 if (is_gimple_call (stmt)
4114 && expand_call_inline (bb, stmt, id))
4115 return true;
4118 return false;
4122 /* Walk all basic blocks created after FIRST and try to fold every statement
4123 in the STATEMENTS pointer set. */
4125 static void
4126 fold_marked_statements (int first, struct pointer_set_t *statements)
4128 for (; first < n_basic_blocks; first++)
4129 if (BASIC_BLOCK (first))
4131 gimple_stmt_iterator gsi;
4133 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4134 !gsi_end_p (gsi);
4135 gsi_next (&gsi))
4136 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4138 gimple old_stmt = gsi_stmt (gsi);
4139 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4141 if (old_decl && DECL_BUILT_IN (old_decl))
4143 /* Folding builtins can create multiple instructions;
4144 we need to look at all of them. */
4145 gimple_stmt_iterator i2 = gsi;
4146 gsi_prev (&i2);
4147 if (fold_stmt (&gsi))
4149 gimple new_stmt;
4150 if (gsi_end_p (i2))
4151 i2 = gsi_start_bb (BASIC_BLOCK (first));
4152 else
4153 gsi_next (&i2);
4154 while (1)
4156 new_stmt = gsi_stmt (i2);
4157 update_stmt (new_stmt);
4158 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4159 new_stmt);
4161 if (new_stmt == gsi_stmt (gsi))
4163 /* It is okay to check only the very last of
4164 these statements. If it is a throwing
4165 statement nothing will change. If it isn't,
4166 this can remove EH edges. The only case that
4167 would be wrong is if an intermediate stmt
4168 could throw but the last one couldn't; that
4169 would mean we'd have to split the block,
4170 which we can't do here, and we'd lose the
4171 edge anyway. And as builtins probably never
4172 throw, this is all moot anyway. */
4173 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4174 new_stmt))
4175 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4176 break;
4178 gsi_next (&i2);
4182 else if (fold_stmt (&gsi))
4184 /* Re-read the statement from GSI as fold_stmt() may
4185 have changed it. */
4186 gimple new_stmt = gsi_stmt (gsi);
4187 update_stmt (new_stmt);
4189 if (is_gimple_call (old_stmt)
4190 || is_gimple_call (new_stmt))
4191 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4192 new_stmt);
4194 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4195 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4201 /* Return true if BB has at least one abnormal outgoing edge. */
4203 static inline bool
4204 has_abnormal_outgoing_edge_p (basic_block bb)
4206 edge e;
4207 edge_iterator ei;
4209 FOR_EACH_EDGE (e, ei, bb->succs)
4210 if (e->flags & EDGE_ABNORMAL)
4211 return true;
4213 return false;
4216 /* Expand calls to inline functions in the body of FN. */
4218 unsigned int
4219 optimize_inline_calls (tree fn)
4221 copy_body_data id;
4222 basic_block bb;
4223 int last = n_basic_blocks;
4224 struct gimplify_ctx gctx;
4226 /* There is no point in performing inlining if errors have already
4227 occurred -- and we might crash if we try to inline invalid
4228 code. */
4229 if (seen_error ())
4230 return 0;
4232 /* Clear out ID. */
4233 memset (&id, 0, sizeof (id));
4235 id.src_node = id.dst_node = cgraph_node (fn);
4236 id.dst_fn = fn;
4237 /* Or any functions that aren't finished yet. */
4238 if (current_function_decl)
4239 id.dst_fn = current_function_decl;
4241 id.copy_decl = copy_decl_maybe_to_var;
4242 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4243 id.transform_new_cfg = false;
4244 id.transform_return_to_modify = true;
4245 id.transform_lang_insert_block = NULL;
4246 id.statements_to_fold = pointer_set_create ();
4248 push_gimplify_context (&gctx);
4250 /* We make no attempts to keep dominance info up-to-date. */
4251 free_dominance_info (CDI_DOMINATORS);
4252 free_dominance_info (CDI_POST_DOMINATORS);
4254 /* Register specific gimple functions. */
4255 gimple_register_cfg_hooks ();
4257 /* Reach the trees by walking over the CFG, and note the
4258 enclosing basic-blocks in the call edges. */
4259 /* We walk the blocks going forward, because inlined function bodies
4260 will split id->current_basic_block, and the new blocks will
4261 follow it; we'll trudge through them, processing their CALL_EXPRs
4262 along the way. */
4263 FOR_EACH_BB (bb)
4264 gimple_expand_calls_inline (bb, &id);
4266 pop_gimplify_context (NULL);
4268 #ifdef ENABLE_CHECKING
4270 struct cgraph_edge *e;
4272 verify_cgraph_node (id.dst_node);
4274 /* Double check that we inlined everything we are supposed to inline. */
4275 for (e = id.dst_node->callees; e; e = e->next_callee)
4276 gcc_assert (e->inline_failed);
4278 #endif
4280 /* Fold the statements before compacting/renumbering the basic blocks. */
4281 fold_marked_statements (last, id.statements_to_fold);
4282 pointer_set_destroy (id.statements_to_fold);
4284 gcc_assert (!id.debug_stmts);
4286 /* Renumber the (code) basic_blocks consecutively. */
4287 compact_blocks ();
4288 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4289 number_blocks (fn);
4291 fold_cond_expr_cond ();
4292 delete_unreachable_blocks_update_callgraph (&id);
4293 #ifdef ENABLE_CHECKING
4294 verify_cgraph_node (id.dst_node);
4295 #endif
4297 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4298 not possible yet - the IPA passes might make various functions not
4299 throw, and they don't care to proactively update local EH info. This is
4300 done later in the fixup_cfg pass, which also executes the verification. */
4301 return (TODO_update_ssa
4302 | TODO_cleanup_cfg
4303 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4304 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4307 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4309 tree
4310 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4312 enum tree_code code = TREE_CODE (*tp);
4313 enum tree_code_class cl = TREE_CODE_CLASS (code);
4315 /* We make copies of most nodes. */
4316 if (IS_EXPR_CODE_CLASS (cl)
4317 || code == TREE_LIST
4318 || code == TREE_VEC
4319 || code == TYPE_DECL
4320 || code == OMP_CLAUSE)
4322 /* Because the chain gets clobbered when we make a copy, we save it
4323 here. */
4324 tree chain = NULL_TREE, new_tree;
4326 chain = TREE_CHAIN (*tp);
4328 /* Copy the node. */
4329 new_tree = copy_node (*tp);
4331 /* Propagate mudflap marked-ness. */
4332 if (flag_mudflap && mf_marked_p (*tp))
4333 mf_mark (new_tree);
4335 *tp = new_tree;
4337 /* Now, restore the chain, if appropriate. That will cause
4338 walk_tree to walk into the chain as well. */
4339 if (code == PARM_DECL
4340 || code == TREE_LIST
4341 || code == OMP_CLAUSE)
4342 TREE_CHAIN (*tp) = chain;
4344 /* For now, we don't update BLOCKs when we make copies. So, we
4345 have to nullify all BIND_EXPRs. */
4346 if (TREE_CODE (*tp) == BIND_EXPR)
4347 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4349 else if (code == CONSTRUCTOR)
4351 /* CONSTRUCTOR nodes need special handling because
4352 we need to duplicate the vector of elements. */
4353 tree new_tree;
4355 new_tree = copy_node (*tp);
4357 /* Propagate mudflap marked-ness. */
4358 if (flag_mudflap && mf_marked_p (*tp))
4359 mf_mark (new_tree);
4361 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4362 CONSTRUCTOR_ELTS (*tp));
4363 *tp = new_tree;
4365 else if (TREE_CODE_CLASS (code) == tcc_type)
4366 *walk_subtrees = 0;
4367 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4368 *walk_subtrees = 0;
4369 else if (TREE_CODE_CLASS (code) == tcc_constant)
4370 *walk_subtrees = 0;
4371 else
4372 gcc_assert (code != STATEMENT_LIST);
4373 return NULL_TREE;
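/* Typical usage (an illustrative sketch; EXPR is a hypothetical
   tree):

     walk_tree (&expr, copy_tree_r, NULL, NULL);

   afterwards EXPR denotes a fresh copy in which expressions,
   TREE_LISTs, TREE_VECs and CONSTRUCTORs were duplicated, while
   types, constants and most declarations remain shared with the
   original.  */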
4376 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4377 information indicating to what new SAVE_EXPR this one should be mapped,
4378 use that one. Otherwise, create a new node and enter it in ST. FN is
4379 the function into which the copy will be placed. */
4381 static void
4382 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4384 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4385 tree *n;
4386 tree t;
4388 /* See if we already encountered this SAVE_EXPR. */
4389 n = (tree *) pointer_map_contains (st, *tp);
4391 /* If we didn't already remap this SAVE_EXPR, do so now. */
4392 if (!n)
4394 t = copy_node (*tp);
4396 /* Remember this SAVE_EXPR. */
4397 *pointer_map_insert (st, *tp) = t;
4398 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4399 *pointer_map_insert (st, t) = t;
4401 else
4403 /* We've already walked into this SAVE_EXPR; don't do it again. */
4404 *walk_subtrees = 0;
4405 t = *n;
4408 /* Replace this SAVE_EXPR with the copy. */
4409 *tp = t;
4412 /* Called via walk_tree. If *TP points to a DECL_STMT for a local label,
4413 copies the declaration and enters it in the decl map in DATA (which is
4414 really a `copy_body_data *'). */
4416 static tree
4417 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4418 void *data)
4420 copy_body_data *id = (copy_body_data *) data;
4422 /* Don't walk into types. */
4423 if (TYPE_P (*tp))
4424 *walk_subtrees = 0;
4426 else if (TREE_CODE (*tp) == LABEL_EXPR)
4428 tree decl = TREE_OPERAND (*tp, 0);
4430 /* Copy the decl and remember the copy. */
4431 insert_decl_map (id, decl, id->copy_decl (decl, id));
4434 return NULL_TREE;
4437 /* Perform any modifications to EXPR required when it is unsaved. Does
4438 not recurse into EXPR's subtrees. */
4440 static void
4441 unsave_expr_1 (tree expr)
4443 switch (TREE_CODE (expr))
4445 case TARGET_EXPR:
4446 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4447 It's OK for this to happen if it was part of a subtree that
4448 isn't immediately expanded, such as operand 2 of another
4449 TARGET_EXPR. */
4450 if (TREE_OPERAND (expr, 1))
4451 break;
4453 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4454 TREE_OPERAND (expr, 3) = NULL_TREE;
4455 break;
4457 default:
4458 break;
4462 /* Called via walk_tree when an expression is unsaved. Using the
4463 pointer map pointed to by ST (really a `struct pointer_map_t *'),
4464 remaps all local declarations to appropriate replacements. */
4466 static tree
4467 unsave_r (tree *tp, int *walk_subtrees, void *data)
4469 copy_body_data *id = (copy_body_data *) data;
4470 struct pointer_map_t *st = id->decl_map;
4471 tree *n;
4473 /* Only a local declaration (variable or label). */
4474 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4475 || TREE_CODE (*tp) == LABEL_DECL)
4477 /* Lookup the declaration. */
4478 n = (tree *) pointer_map_contains (st, *tp);
4480 /* If it's there, remap it. */
4481 if (n)
4482 *tp = *n;
4485 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4486 gcc_unreachable ();
4487 else if (TREE_CODE (*tp) == BIND_EXPR)
4488 copy_bind_expr (tp, walk_subtrees, id);
4489 else if (TREE_CODE (*tp) == SAVE_EXPR
4490 || TREE_CODE (*tp) == TARGET_EXPR)
4491 remap_save_expr (tp, st, walk_subtrees);
4492 else
4494 copy_tree_r (tp, walk_subtrees, NULL);
4496 /* Do whatever unsaving is required. */
4497 unsave_expr_1 (*tp);
4500 /* Keep iterating. */
4501 return NULL_TREE;
4504 /* Copies everything in EXPR and replaces variables, labels
4505 and SAVE_EXPRs local to EXPR. */
4507 tree
4508 unsave_expr_now (tree expr)
4510 copy_body_data id;
4512 /* There's nothing to do for NULL_TREE. */
4513 if (expr == 0)
4514 return expr;
4516 /* Set up ID. */
4517 memset (&id, 0, sizeof (id));
4518 id.src_fn = current_function_decl;
4519 id.dst_fn = current_function_decl;
4520 id.decl_map = pointer_map_create ();
4521 id.debug_map = NULL;
4523 id.copy_decl = copy_decl_no_change;
4524 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4525 id.transform_new_cfg = false;
4526 id.transform_return_to_modify = false;
4527 id.transform_lang_insert_block = NULL;
4529 /* Walk the tree once to find local labels. */
4530 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4532 /* Walk the tree again, copying, remapping, and unsaving. */
4533 walk_tree (&expr, unsave_r, &id, NULL);
4535 /* Clean up. */
4536 pointer_map_destroy (id.decl_map);
4537 if (id.debug_map)
4538 pointer_map_destroy (id.debug_map);
4540 return expr;
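/* Usage note (illustrative; EXPR is a hypothetical tree of the
   current function):

     tree copy = unsave_expr_now (expr);

   COPY then shares no local variables, labels or SAVE_EXPRs with the
   original, so the two trees can be expanded independently.  */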
4543 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4544 label, copies the declaration and enters it in the decl map in DATA (which
4545 is really a 'copy_body_data *'). */
4547 static tree
4548 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4549 bool *handled_ops_p ATTRIBUTE_UNUSED,
4550 struct walk_stmt_info *wi)
4552 copy_body_data *id = (copy_body_data *) wi->info;
4553 gimple stmt = gsi_stmt (*gsip);
4555 if (gimple_code (stmt) == GIMPLE_LABEL)
4557 tree decl = gimple_label_label (stmt);
4559 /* Copy the decl and remember the copy. */
4560 insert_decl_map (id, decl, id->copy_decl (decl, id));
4563 return NULL_TREE;
4567 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4568 Using the pointer map pointed to by ST (really a `struct
4569 pointer_map_t *'), remaps all local declarations to appropriate
4570 replacements in gimple operands. */
4572 static tree
4573 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4575 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4576 copy_body_data *id = (copy_body_data *) wi->info;
4577 struct pointer_map_t *st = id->decl_map;
4578 tree *n;
4579 tree expr = *tp;
4581 /* Only a local declaration (variable or label). */
4582 if ((TREE_CODE (expr) == VAR_DECL
4583 && !TREE_STATIC (expr))
4584 || TREE_CODE (expr) == LABEL_DECL)
4586 /* Lookup the declaration. */
4587 n = (tree *) pointer_map_contains (st, expr);
4589 /* If it's there, remap it. */
4590 if (n)
4591 *tp = *n;
4592 *walk_subtrees = 0;
4594 else if (TREE_CODE (expr) == STATEMENT_LIST
4595 || TREE_CODE (expr) == BIND_EXPR
4596 || TREE_CODE (expr) == SAVE_EXPR)
4597 gcc_unreachable ();
4598 else if (TREE_CODE (expr) == TARGET_EXPR)
4600 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4601 It's OK for this to happen if it was part of a subtree that
4602 isn't immediately expanded, such as operand 2 of another
4603 TARGET_EXPR. */
4604 if (!TREE_OPERAND (expr, 1))
4606 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4607 TREE_OPERAND (expr, 3) = NULL_TREE;
4611 /* Keep iterating. */
4612 return NULL_TREE;
4616 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4617 Using the pointer map pointed to by ST (really a `struct
4618 pointer_map_t *'), remaps all local declarations to appropriate
4619 replacements in gimple statements. */
4621 static tree
4622 replace_locals_stmt (gimple_stmt_iterator *gsip,
4623 bool *handled_ops_p ATTRIBUTE_UNUSED,
4624 struct walk_stmt_info *wi)
4626 copy_body_data *id = (copy_body_data *) wi->info;
4627 gimple stmt = gsi_stmt (*gsip);
4629 if (gimple_code (stmt) == GIMPLE_BIND)
4631 tree block = gimple_bind_block (stmt);
4633 if (block)
4635 remap_block (&block, id);
4636 gimple_bind_set_block (stmt, block);
4639 /* This will remap a lot of the same decls again, but this should be
4640 harmless. */
4641 if (gimple_bind_vars (stmt))
4642 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4645 /* Keep iterating. */
4646 return NULL_TREE;
4650 /* Copies everything in SEQ and replaces variables and labels local to
4651 current_function_decl. */
4653 gimple_seq
4654 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4656 copy_body_data id;
4657 struct walk_stmt_info wi;
4658 struct pointer_set_t *visited;
4659 gimple_seq copy;
4661 /* There's nothing to do for an empty sequence. */
4662 if (seq == NULL)
4663 return seq;
4665 /* Set up ID. */
4666 memset (&id, 0, sizeof (id));
4667 id.src_fn = current_function_decl;
4668 id.dst_fn = current_function_decl;
4669 id.decl_map = pointer_map_create ();
4670 id.debug_map = NULL;
4672 id.copy_decl = copy_decl_no_change;
4673 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4674 id.transform_new_cfg = false;
4675 id.transform_return_to_modify = false;
4676 id.transform_lang_insert_block = NULL;
4678 /* Walk the tree once to find local labels. */
4679 memset (&wi, 0, sizeof (wi));
4680 visited = pointer_set_create ();
4681 wi.info = &id;
4682 wi.pset = visited;
4683 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4684 pointer_set_destroy (visited);
4686 copy = gimple_seq_copy (seq);
4688 /* Walk the copy, remapping decls. */
4689 memset (&wi, 0, sizeof (wi));
4690 wi.info = &id;
4691 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4693 /* Clean up. */
4694 pointer_map_destroy (id.decl_map);
4695 if (id.debug_map)
4696 pointer_map_destroy (id.debug_map);
4698 return copy;
4702 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4704 static tree
4705 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4707 if (*tp == data)
4708 return (tree) data;
4709 else
4710 return NULL;
4713 DEBUG_FUNCTION bool
4714 debug_find_tree (tree top, tree search)
4716 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4720 /* Declare the variables created by the inliner. Add all the variables in
4721 VARS to BLOCK. */
4723 static void
4724 declare_inline_vars (tree block, tree vars)
4726 tree t;
4727 for (t = vars; t; t = TREE_CHAIN (t))
4729 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4730 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4731 add_local_decl (cfun, t);
4734 if (block)
4735 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4738 /* Finish up the copying of DECL, whose copy is COPY. The DECL originally
4739 was in ID->src_fn, but the copy will live in ID->dst_fn. Fix up debug
4740 info, RTL and DECL_CONTEXT for the copy. */
4742 static tree
4743 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4745 /* Don't generate debug information for the copy if we wouldn't have
4746 generated it for the original either. */
4747 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4748 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4750 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4751 declaration inspired this copy. */
4752 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4754 /* The new variable/label has no RTL, yet. */
4755 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4756 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4757 SET_DECL_RTL (copy, 0);
4759 /* These args would always appear unused, if not for this. */
4760 TREE_USED (copy) = 1;
4762 /* Set the context for the new declaration. */
4763 if (!DECL_CONTEXT (decl))
4764 /* Globals stay global. */
4766 else if (DECL_CONTEXT (decl) != id->src_fn)
4767 /* Things that weren't in the scope of the function we're inlining
4768 from aren't in the scope we're inlining to, either. */
4770 else if (TREE_STATIC (decl))
4771 /* Function-scoped static variables should stay in the original
4772 function. */
4774 else
4775 /* Ordinary automatic local variables are now in the scope of the
4776 new function. */
4777 DECL_CONTEXT (copy) = id->dst_fn;
4779 return copy;
4782 static tree
4783 copy_decl_to_var (tree decl, copy_body_data *id)
4785 tree copy, type;
4787 gcc_assert (TREE_CODE (decl) == PARM_DECL
4788 || TREE_CODE (decl) == RESULT_DECL);
4790 type = TREE_TYPE (decl);
4792 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4793 VAR_DECL, DECL_NAME (decl), type);
4794 if (DECL_PT_UID_SET_P (decl))
4795 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4796 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4797 TREE_READONLY (copy) = TREE_READONLY (decl);
4798 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4799 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4801 return copy_decl_for_dup_finish (id, decl, copy);
4804 /* Like copy_decl_to_var, but create a return slot object instead of a
4805 pointer variable for return by invisible reference. */
4807 static tree
4808 copy_result_decl_to_var (tree decl, copy_body_data *id)
4810 tree copy, type;
4812 gcc_assert (TREE_CODE (decl) == PARM_DECL
4813 || TREE_CODE (decl) == RESULT_DECL);
4815 type = TREE_TYPE (decl);
4816 if (DECL_BY_REFERENCE (decl))
4817 type = TREE_TYPE (type);
4819 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4820 VAR_DECL, DECL_NAME (decl), type);
4821 if (DECL_PT_UID_SET_P (decl))
4822 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4823 TREE_READONLY (copy) = TREE_READONLY (decl);
4824 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4825 if (!DECL_BY_REFERENCE (decl))
4827 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4828 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4831 return copy_decl_for_dup_finish (id, decl, copy);
4834 tree
4835 copy_decl_no_change (tree decl, copy_body_data *id)
4837 tree copy;
4839 copy = copy_node (decl);
4841 /* The COPY is not abstract; it will be generated in DST_FN. */
4842 DECL_ABSTRACT (copy) = 0;
4843 lang_hooks.dup_lang_specific_decl (copy);
4845 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4846 been taken; it's for internal bookkeeping in expand_goto_internal. */
4847 if (TREE_CODE (copy) == LABEL_DECL)
4849 TREE_ADDRESSABLE (copy) = 0;
4850 LABEL_DECL_UID (copy) = -1;
4853 return copy_decl_for_dup_finish (id, decl, copy);
4856 static tree
4857 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4859 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4860 return copy_decl_to_var (decl, id);
4861 else
4862 return copy_decl_no_change (decl, id);
4865 /* Return a copy of the function's argument tree. */
4866 static tree
4867 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4868 bitmap args_to_skip, tree *vars)
4870 tree arg, *parg;
4871 tree new_parm = NULL;
4872 int i = 0;
4874 parg = &new_parm;
4876 for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4877 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4879 tree new_tree = remap_decl (arg, id);
4880 lang_hooks.dup_lang_specific_decl (new_tree);
4881 *parg = new_tree;
4882 parg = &TREE_CHAIN (new_tree);
4884 else if (!pointer_map_contains (id->decl_map, arg))
4886 /* Make an equivalent VAR_DECL. If the argument is used
4887 as a temporary variable later in the function, its uses will be
4888 replaced by this local variable. */
4889 tree var = copy_decl_to_var (arg, id);
4890 get_var_ann (var);
4891 add_referenced_var (var);
4892 insert_decl_map (id, arg, var);
4893 /* Declare this new variable. */
4894 TREE_CHAIN (var) = *vars;
4895 *vars = var;
4897 return new_parm;
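/* Worked example (hypothetical, for illustration): versioning

     int f (int a, int b, int c);

   with bit 1 set in ARGS_TO_SKIP yields the parameter list (a, c);
   B is instead turned into a VAR_DECL and chained onto *VARS so that
   any remaining uses of it in the body get remapped to that
   local.  */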
4900 /* Return a copy of the function's static chain. */
4901 static tree
4902 copy_static_chain (tree static_chain, copy_body_data * id)
4904 tree *chain_copy, *pvar;
4906 chain_copy = &static_chain;
4907 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4909 tree new_tree = remap_decl (*pvar, id);
4910 lang_hooks.dup_lang_specific_decl (new_tree);
4911 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4912 *pvar = new_tree;
4914 return static_chain;
4917 /* Return true if the function is allowed to be versioned.
4918 This is a guard for the versioning functionality. */
4920 bool
4921 tree_versionable_function_p (tree fndecl)
4923 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4924 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
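/* For instance (illustrative): a function declared

     void f (void) __attribute__ ((noclone));

   is rejected here, as is any function for which copy_forbidden
   finds a blocker; the precise set of reasons lives in
   copy_forbidden.  */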
4927 /* Delete all unreachable basic blocks and update the callgraph.
4928 Doing so is somewhat nontrivial because we need to update all clones and
4929 remove inline functions that become unreachable. */
4931 static bool
4932 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4934 bool changed = false;
4935 basic_block b, next_bb;
4937 find_unreachable_blocks ();
4939 /* Delete all unreachable basic blocks. */
4941 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4943 next_bb = b->next_bb;
4945 if (!(b->flags & BB_REACHABLE))
4947 gimple_stmt_iterator bsi;
4949 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4950 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4952 struct cgraph_edge *e;
4953 struct cgraph_node *node;
4955 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4957 if (!e->inline_failed)
4958 cgraph_remove_node_and_inline_clones (e->callee);
4959 else
4960 cgraph_remove_edge (e);
4962 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4963 && id->dst_node->clones)
4964 for (node = id->dst_node->clones; node != id->dst_node;)
4966 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4968 if (!e->inline_failed)
4969 cgraph_remove_node_and_inline_clones (e->callee);
4970 else
4971 cgraph_remove_edge (e);
4974 if (node->clones)
4975 node = node->clones;
4976 else if (node->next_sibling_clone)
4977 node = node->next_sibling_clone;
4978 else
4980 while (node != id->dst_node && !node->next_sibling_clone)
4981 node = node->clone_of;
4982 if (node != id->dst_node)
4983 node = node->next_sibling_clone;
4987 delete_basic_block (b);
4988 changed = true;
4992 if (changed)
4993 tidy_fallthru_edges ();
4994 return changed;
4997 /* Update clone info after duplication. */
4999 static void
5000 update_clone_info (copy_body_data * id)
5002 struct cgraph_node *node;
5003 if (!id->dst_node->clones)
5004 return;
5005 for (node = id->dst_node->clones; node != id->dst_node;)
5007 /* First update replace maps to match the new body. */
5008 if (node->clone.tree_map)
5010 unsigned int i;
5011 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
5013 struct ipa_replace_map *replace_info;
5014 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
5015 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5016 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5019 if (node->clones)
5020 node = node->clones;
5021 else if (node->next_sibling_clone)
5022 node = node->next_sibling_clone;
5023 else
5025 while (node != id->dst_node && !node->next_sibling_clone)
5026 node = node->clone_of;
5027 if (node != id->dst_node)
5028 node = node->next_sibling_clone;
5033 /* Create a copy of a function's tree.
5034 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5035 of the original function and the new copied function
5036 respectively. In case we want to replace a DECL
5037 tree with another tree while duplicating the function's
5038 body, TREE_MAP represents the mapping between these
5039 trees. If UPDATE_CLONES is set, the call_stmt fields
5040 of edges of clones of the function will be updated.
5042 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
5043 from the new version.
5044 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5045 If non-NULL, NEW_ENTRY determines the new entry BB of the clone. */
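/* An illustrative use (hypothetical values): IPA constant propagation
   can pass a TREE_MAP with one ipa_replace_map entry that maps
   parameter N of OLD_DECL to the constant 5 and set bit N in
   ARGS_TO_SKIP; NEW_DECL then becomes a clone of OLD_DECL specialized
   for that argument value.  */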
5047 void
5048 tree_function_versioning (tree old_decl, tree new_decl,
5049 VEC(ipa_replace_map_p,gc)* tree_map,
5050 bool update_clones, bitmap args_to_skip,
5051 bitmap blocks_to_copy, basic_block new_entry)
5053 struct cgraph_node *old_version_node;
5054 struct cgraph_node *new_version_node;
5055 copy_body_data id;
5056 tree p;
5057 unsigned i;
5058 struct ipa_replace_map *replace_info;
5059 basic_block old_entry_block, bb;
5060 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
5062 tree old_current_function_decl = current_function_decl;
5063 tree vars = NULL_TREE;
5065 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5066 && TREE_CODE (new_decl) == FUNCTION_DECL);
5067 DECL_POSSIBLY_INLINED (old_decl) = 1;
5069 old_version_node = cgraph_node (old_decl);
5070 new_version_node = cgraph_node (new_decl);
5072 /* Output the inlining info for this abstract function, since it has been
5073 inlined. If we don't do this now, we can lose the information about the
5074 variables in the function when the blocks get blown away as soon as we
5075 remove the cgraph node. */
5076 (*debug_hooks->outlining_inline_function) (old_decl);
5078 DECL_ARTIFICIAL (new_decl) = 1;
5079 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5080 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5082 /* Prepare the data structures for the tree copy. */
5083 memset (&id, 0, sizeof (id));
5085 /* Generate a new name for the new version. */
5086 id.statements_to_fold = pointer_set_create ();
5088 id.decl_map = pointer_map_create ();
5089 id.debug_map = NULL;
5090 id.src_fn = old_decl;
5091 id.dst_fn = new_decl;
5092 id.src_node = old_version_node;
5093 id.dst_node = new_version_node;
5094 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5095 if (id.src_node->ipa_transforms_to_apply)
5097 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
5098 unsigned int i;
5100 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
5101 id.src_node->ipa_transforms_to_apply);
5102 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
5103 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
5104 VEC_index (ipa_opt_pass,
5105 old_transforms_to_apply,
5106 i));
5109 id.copy_decl = copy_decl_no_change;
5110 id.transform_call_graph_edges
5111 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5112 id.transform_new_cfg = true;
5113 id.transform_return_to_modify = false;
5114 id.transform_lang_insert_block = NULL;
5116 current_function_decl = new_decl;
5117 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5118 (DECL_STRUCT_FUNCTION (old_decl));
5119 initialize_cfun (new_decl, old_decl,
5120 old_entry_block->count);
5121 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5122 = id.src_cfun->gimple_df->ipa_pta;
5123 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
5125 /* Copy the function's static chain. */
5126 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5127 if (p)
5128 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5129 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5130 &id);
5132 /* If there's a tree_map, prepare for substitution. */
5133 if (tree_map)
5134 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
5136 gimple init;
5137 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
5138 if (replace_info->replace_p)
5140 tree op = replace_info->new_tree;
5141 if (!replace_info->old_tree)
5143 int i = replace_info->parm_num;
5144 tree parm;
5145 for (parm = DECL_ARGUMENTS (old_decl); i; parm = TREE_CHAIN (parm))
5146 i --;
5147 replace_info->old_tree = parm;
5151 STRIP_NOPS (op);
5153 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
5154 op = TREE_OPERAND (op, 0);
5156 if (TREE_CODE (op) == ADDR_EXPR)
5158 op = TREE_OPERAND (op, 0);
5159 while (handled_component_p (op))
5160 op = TREE_OPERAND (op, 0);
5161 if (TREE_CODE (op) == VAR_DECL)
5162 add_referenced_var (op);
5164 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5165 init = setup_one_parameter (&id, replace_info->old_tree,
5166 replace_info->new_tree, id.src_fn,
5167 NULL,
5168 &vars);
5169 if (init)
5170 VEC_safe_push (gimple, heap, init_stmts, init);
5173 /* Copy the function's arguments. */
5174 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5175 DECL_ARGUMENTS (new_decl) =
5176 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5177 args_to_skip, &vars);
5179 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5181 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5182 number_blocks (id.dst_fn);
5184 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5186 if (!VEC_empty (tree, DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5187 /* Add local vars. */
5188 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id, false);
5190 /* Copy the function's body. */
5191 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5192 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
5194 if (DECL_RESULT (old_decl) != NULL_TREE)
5196 tree *res_decl = &DECL_RESULT (old_decl);
5197 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
5198 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5201 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5202 number_blocks (new_decl);
5204 /* We want to create the BB unconditionally, so that the addition of
5205 debug stmts doesn't affect BB count, which may in the end cause
5206 codegen differences. */
5207 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5208 while (VEC_length (gimple, init_stmts))
5209 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
5210 update_clone_info (&id);
5212 /* Remap the nonlocal_goto_save_area, if any. */
5213 if (cfun->nonlocal_goto_save_area)
5215 struct walk_stmt_info wi;
5217 memset (&wi, 0, sizeof (wi));
5218 wi.info = &id;
5219 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5222 /* Clean up. */
5223 pointer_map_destroy (id.decl_map);
5224 if (id.debug_map)
5225 pointer_map_destroy (id.debug_map);
5226 free_dominance_info (CDI_DOMINATORS);
5227 free_dominance_info (CDI_POST_DOMINATORS);
5229 fold_marked_statements (0, id.statements_to_fold);
5230 pointer_set_destroy (id.statements_to_fold);
5231 fold_cond_expr_cond ();
5232 delete_unreachable_blocks_update_callgraph (&id);
5233 if (id.dst_node->analyzed)
5234 cgraph_rebuild_references ();
5235 update_ssa (TODO_update_ssa);
5237 /* After partial cloning we need to rescale frequencies, so they are
5238 within proper range in the cloned function. */
5239 if (new_entry)
5241 struct cgraph_edge *e;
5242 rebuild_frequencies ();
5244 new_version_node->count = ENTRY_BLOCK_PTR->count;
5245 for (e = new_version_node->callees; e; e = e->next_callee)
5247 basic_block bb = gimple_bb (e->call_stmt);
5248 e->frequency = compute_call_stmt_bb_frequency (current_function_decl, bb);
5249 e->count = bb->count;
5253 free_dominance_info (CDI_DOMINATORS);
5254 free_dominance_info (CDI_POST_DOMINATORS);
5256 gcc_assert (!id.debug_stmts);
5257 VEC_free (gimple, heap, init_stmts);
5258 pop_cfun ();
5259 current_function_decl = old_current_function_decl;
5260 gcc_assert (!current_function_decl
5261 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
5262 return;
5265 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5266 the callee and return the inlined body on success. */
5268 tree
5269 maybe_inline_call_in_expr (tree exp)
5271 tree fn = get_callee_fndecl (exp);
5273 /* We can only try to inline "const" functions. */
5274 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5276 struct pointer_map_t *decl_map = pointer_map_create ();
5277 call_expr_arg_iterator iter;
5278 copy_body_data id;
5279 tree param, arg, t;
5281 /* Remap the parameters. */
5282 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5283 param;
5284 param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
5285 *pointer_map_insert (decl_map, param) = arg;
5287 memset (&id, 0, sizeof (id));
5288 id.src_fn = fn;
5289 id.dst_fn = current_function_decl;
5290 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5291 id.decl_map = decl_map;
5293 id.copy_decl = copy_decl_no_change;
5294 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5295 id.transform_new_cfg = false;
5296 id.transform_return_to_modify = true;
5297 id.transform_lang_insert_block = NULL;
5299 /* Make sure not to unshare trees behind the front-end's back
5300 since front-end specific mechanisms may rely on sharing. */
5301 id.regimplify = false;
5302 id.do_not_unshare = true;
5304 /* We're not inside any EH region. */
5305 id.eh_lp_nr = 0;
5307 t = copy_tree_body (&id);
5308 pointer_map_destroy (decl_map);
5310 /* We can only return something suitable for use in a GENERIC
5311 expression tree. */
5312 if (TREE_CODE (t) == MODIFY_EXPR)
5313 return TREE_OPERAND (t, 1);
5316 return NULL_TREE;
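/* A sketch of the intended use (hypothetical function): for a const
   function such as

     static int sq (int x) { return x * x; }

   a GENERIC call sq (3) can be integrated here, and the RHS of the
   final MODIFY_EXPR of the inlined body (3 * 3) is returned in place
   of the call.  Only TREE_READONLY callees with a saved tree are
   considered, so functions with side effects never qualify.  */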
5319 /* Duplicate a type, fields and all. */
5321 tree
5322 build_duplicate_type (tree type)
5324 struct copy_body_data id;
5326 memset (&id, 0, sizeof (id));
5327 id.src_fn = current_function_decl;
5328 id.dst_fn = current_function_decl;
5329 id.src_cfun = cfun;
5330 id.decl_map = pointer_map_create ();
5331 id.debug_map = NULL;
5332 id.copy_decl = copy_decl_no_change;
5334 type = remap_type_1 (type, &id);
5336 pointer_map_destroy (id.decl_map);
5337 if (id.debug_map)
5338 pointer_map_destroy (id.debug_map);
5340 TYPE_CANONICAL (type) = type;
5342 return type;
5345 /* Return whether it is safe to inline the function called by edge E:
5346 inlining is unsafe when the callee uses incompatible target-specific
5347 options or the call-site argument types mismatch the parameter types. */
5348 bool
5349 tree_can_inline_p (struct cgraph_edge *e)
5351 #if 0
5352 /* This causes a regression in SPEC in that it prevents a cold function from
5353 inlining a hot function. Perhaps this should only apply to functions
5354 that the user declares hot/cold/optimize explicitly. */
5356 /* Don't inline a function with a higher optimization level than the
5357 caller, or with different space constraints (hot/cold functions). */
5358 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
5359 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
5361 if (caller_tree != callee_tree)
5363 struct cl_optimization *caller_opt
5364 = TREE_OPTIMIZATION ((caller_tree)
5365 ? caller_tree
5366 : optimization_default_node);
5368 struct cl_optimization *callee_opt
5369 = TREE_OPTIMIZATION ((callee_tree)
5370 ? callee_tree
5371 : optimization_default_node);
5373 if ((caller_opt->optimize > callee_opt->optimize)
5374 || (caller_opt->optimize_size != callee_opt->optimize_size))
5375 return false;
5377 #endif
5378 tree caller, callee, lhs;
5380 caller = e->caller->decl;
5381 callee = e->callee->decl;
5383 /* First check that inlining isn't simply forbidden in this case. */
5384 if (inline_forbidden_into_p (caller, callee))
5386 e->inline_failed = CIF_UNSPECIFIED;
5387 gimple_call_set_cannot_inline (e->call_stmt, true);
5388 return false;
5391 /* Allow the backend to decide if inlining is ok. */
5392 if (!targetm.target_option.can_inline_p (caller, callee))
5394 e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5395 gimple_call_set_cannot_inline (e->call_stmt, true);
5396 e->call_stmt_cannot_inline_p = true;
5397 return false;
5400 /* Do not inline calls where we cannot trivially work around mismatches
5401 in argument or return types. */
5402 if (e->call_stmt
5403 && ((DECL_RESULT (callee)
5404 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
5405 && (lhs = gimple_call_lhs (e->call_stmt)) != NULL_TREE
5406 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
5407 TREE_TYPE (lhs))
5408 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
5409 || !gimple_check_call_args (e->call_stmt)))
5411 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5412 gimple_call_set_cannot_inline (e->call_stmt, true);
5413 e->call_stmt_cannot_inline_p = true;
5414 return false;
5417 return true;
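/* Illustrative mismatch (hypothetical code): if f is defined as

     float f (void) { return 1.0f; }

   but the caller was compiled against an old implicit declaration
   treating f as returning int, the LHS type int neither matches the
   float result nor is fold-convertible from it, so the edge is
   marked CIF_MISMATCHED_ARGUMENTS instead of being inlined.  */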