1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h" /* floor_log2 */
27 #include "diagnostic-core.h"
28 #include "tree.h"
29 #include "tree-inline.h"
30 #include "flags.h"
31 #include "params.h"
32 #include "input.h"
33 #include "insn-config.h"
34 #include "hashtab.h"
35 #include "langhooks.h"
36 #include "basic-block.h"
37 #include "tree-iterator.h"
38 #include "cgraph.h"
39 #include "intl.h"
40 #include "tree-mudflap.h"
41 #include "tree-flow.h"
42 #include "function.h"
44 #include "tree-pretty-print.h"
45 #include "except.h"
46 #include "debug.h"
47 #include "pointer-set.h"
48 #include "ipa-prop.h"
49 #include "value-prof.h"
50 #include "tree-pass.h"
51 #include "target.h"
52 #include "integrate.h"
54 #include "rtl.h" /* FIXME: For asm_str_count. */
56 /* I'm not real happy about this, but we need to handle gimple and
57 non-gimple trees. */
58 #include "gimple.h"
60 /* Inlining, Cloning, Versioning, Parallelization
62 Inlining: a function body is duplicated, but the PARM_DECLs are
63 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
64 MODIFY_EXPRs that store to a dedicated returned-value variable.
65 The duplicated eh_region info of the copy will later be appended
66 to the info for the caller; the eh_region info in copied throwing
 67     statements and RESX statements is adjusted accordingly.
69 Cloning: (only in C++) We have one body for a con/de/structor, and
70 multiple function decls, each with a unique parameter list.
71 Duplicate the body, using the given splay tree; some parameters
72 will become constants (like 0 or 1).
 74     Versioning: a function body is duplicated and the result is a new
 75     function, rather than being inserted into blocks of an existing function
 76     as with inlining.  Some parameters will become constants.
78 Parallelization: a region of a function is duplicated resulting in
79 a new function. Variables may be replaced with complex expressions
80 to enable shared variable semantics.
 82     All of these will simultaneously look up any callgraph edges.  If
83 we're going to inline the duplicated function body, and the given
84 function has some cloned callgraph nodes (one for each place this
85 function will be inlined) those callgraph edges will be duplicated.
86 If we're cloning the body, those callgraph edges will be
87 updated to point into the new body. (Note that the original
88 callgraph node and edge list will not be altered.)
90 See the CALL_EXPR handling case in copy_tree_body_r (). */
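/* Editor's illustration, a hedged sketch that is not part of the original
   sources: at the source level, inlining

       static int inc (int x) { return x + 1; }
       ...
       y = inc (a);

   conceptually produces

       int x_copy = a;        // the PARM_DECL remapped to a VAR_DECL
       int retval;            // the dedicated returned-value variable
       retval = x_copy + 1;   // the RETURN_EXPR became a MODIFY_EXPR
       y = retval;

   The names x_copy and retval are hypothetical and chosen only for the
   example; the real temporaries are compiler generated.  */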
92 /* To Do:
94 o In order to make inlining-on-trees work, we pessimized
95 function-local static constants. In particular, they are now
96 always output, even when not addressed. Fix this by treating
97 function-local static constants just like global static
98 constants; the back-end already knows not to output them if they
99 are not needed.
101 o Provide heuristics to clamp inlining of recursive template
102 calls? */
105 /* Weights that estimate_num_insns uses to estimate the size of the
106 produced code. */
108 eni_weights eni_size_weights;
110 /* Weights that estimate_num_insns uses to estimate the time necessary
111 to execute the produced code. */
113 eni_weights eni_time_weights;
115 /* Prototypes. */
117 static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
118 static void remap_block (tree *, copy_body_data *);
119 static void copy_bind_expr (tree *, int *, copy_body_data *);
120 static tree mark_local_for_remap_r (tree *, int *, void *);
121 static void unsave_expr_1 (tree);
122 static tree unsave_r (tree *, int *, void *);
123 static void declare_inline_vars (tree, tree);
124 static void remap_save_expr (tree *, void *, int *);
125 static void prepend_lexical_block (tree current_block, tree new_block);
126 static tree copy_decl_to_var (tree, copy_body_data *);
127 static tree copy_result_decl_to_var (tree, copy_body_data *);
128 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
129 static gimple remap_gimple_stmt (gimple, copy_body_data *);
130 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
 132 /* Insert a tree->tree mapping for ID.  Although the name suggests
 133    that the trees should be variables, it is used for more than that.  */
135 void
136 insert_decl_map (copy_body_data *id, tree key, tree value)
138 *pointer_map_insert (id->decl_map, key) = value;
140 /* Always insert an identity map as well. If we see this same new
141 node again, we won't want to duplicate it a second time. */
142 if (key != value)
143 *pointer_map_insert (id->decl_map, value) = value;
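/* Editor's usage sketch, kept under #if 0 so it is never compiled: record a
   remapping and look it up again through the same pointer map, relying on
   the identity entry inserted above.  The names example_record_remap,
   old_parm and new_local are hypothetical.  */
#if 0
static void
example_record_remap (copy_body_data *id, tree old_parm, tree new_local)
{
  tree *slot;

  insert_decl_map (id, old_parm, new_local);

  /* The original decl now maps to its copy.  */
  slot = (tree *) pointer_map_contains (id->decl_map, old_parm);
  gcc_assert (slot && *slot == new_local);

  /* The copy maps to itself, so seeing it again will not duplicate it.  */
  slot = (tree *) pointer_map_contains (id->decl_map, new_local);
  gcc_assert (slot && *slot == new_local);
}
#endif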
146 /* Insert a tree->tree mapping for ID. This is only used for
147 variables. */
149 static void
150 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
152 if (!gimple_in_ssa_p (id->src_cfun))
153 return;
155 if (!MAY_HAVE_DEBUG_STMTS)
156 return;
158 if (!target_for_debug_bind (key))
159 return;
161 gcc_assert (TREE_CODE (key) == PARM_DECL);
162 gcc_assert (TREE_CODE (value) == VAR_DECL);
164 if (!id->debug_map)
165 id->debug_map = pointer_map_create ();
167 *pointer_map_insert (id->debug_map, key) = value;
170 /* If nonzero, we're remapping the contents of inlined debug
171 statements. If negative, an error has occurred, such as a
172 reference to a variable that isn't available in the inlined
173 context. */
174 static int processing_debug_stmt = 0;
176 /* Construct new SSA name for old NAME. ID is the inline context. */
178 static tree
179 remap_ssa_name (tree name, copy_body_data *id)
181 tree new_tree;
182 tree *n;
184 gcc_assert (TREE_CODE (name) == SSA_NAME);
186 n = (tree *) pointer_map_contains (id->decl_map, name);
187 if (n)
188 return unshare_expr (*n);
190 if (processing_debug_stmt)
192 processing_debug_stmt = -1;
193 return name;
 196   /* Do not set DEF_STMT yet as the statement is not copied yet.  We do that
197 in copy_bb. */
198 new_tree = remap_decl (SSA_NAME_VAR (name), id);
 200   /* We might've substituted a constant or another SSA_NAME for
 201      the variable.
 203      Replace the SSA name representing RESULT_DECL by the variable during
 204      inlining: this saves us from the need to introduce a PHI node in case
 205      the return value is just partly initialized.  */
206 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
207 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
208 || !id->transform_return_to_modify))
210 struct ptr_info_def *pi;
211 new_tree = make_ssa_name (new_tree, NULL);
212 insert_decl_map (id, name, new_tree);
213 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
214 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
215 TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
216 /* At least IPA points-to info can be directly transferred. */
217 if (id->src_cfun->gimple_df
218 && id->src_cfun->gimple_df->ipa_pta
219 && (pi = SSA_NAME_PTR_INFO (name))
220 && !pi->pt.anything)
222 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
223 new_pi->pt = pi->pt;
225 if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
 227       /* By inlining a function having an uninitialized variable, we might
 228          extend its lifetime (the variable might get reused).  This causes an
 229          ICE in case we end up extending the lifetime of an SSA name across an
 230          abnormal edge, and it also increases register pressure.
 232          We simply initialize all uninitialized vars with 0, except
 233          for the case where we are inlining into the very first BB.  We could avoid
 234          this for all BBs that are not inside strongly connected
 235          regions of the CFG, but this is expensive to test.  */
236 if (id->entry_bb
237 && is_gimple_reg (SSA_NAME_VAR (name))
238 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
239 && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
240 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
241 || EDGE_COUNT (id->entry_bb->preds) != 1))
243 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
244 gimple init_stmt;
246 init_stmt = gimple_build_assign (new_tree,
247 fold_convert (TREE_TYPE (new_tree),
248 integer_zero_node));
249 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
250 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
252 else
254 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
255 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
256 == name)
257 set_default_def (SSA_NAME_VAR (new_tree), new_tree);
261 else
262 insert_decl_map (id, name, new_tree);
263 return new_tree;
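/* Editor's note with a hedged example of why the zero-initialization above
   matters: inlining a callee such as

       int uninit;            // never assigned in the callee
       ...
       if (cond) use (uninit);

   near an abnormal (e.g. setjmp or computed-goto) edge could extend the
   live range of an undefined SSA name across that edge; assigning 0 in the
   entry block of the inlined body gives the name a proper definition
   instead.  */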
266 /* Remap DECL during the copying of the BLOCK tree for the function. */
268 tree
269 remap_decl (tree decl, copy_body_data *id)
271 tree *n;
273 /* We only remap local variables in the current function. */
275 /* See if we have remapped this declaration. */
277 n = (tree *) pointer_map_contains (id->decl_map, decl);
279 if (!n && processing_debug_stmt)
281 processing_debug_stmt = -1;
282 return decl;
285 /* If we didn't already have an equivalent for this declaration,
286 create one now. */
287 if (!n)
289 /* Make a copy of the variable or label. */
290 tree t = id->copy_decl (decl, id);
292 /* Remember it, so that if we encounter this local entity again
293 we can reuse this copy. Do this early because remap_type may
294 need this decl for TYPE_STUB_DECL. */
295 insert_decl_map (id, decl, t);
297 if (!DECL_P (t))
298 return t;
300 /* Remap types, if necessary. */
301 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
302 if (TREE_CODE (t) == TYPE_DECL)
303 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
305 /* Remap sizes as necessary. */
306 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
307 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
309 /* If fields, do likewise for offset and qualifier. */
310 if (TREE_CODE (t) == FIELD_DECL)
312 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
313 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
314 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
317 if (cfun && gimple_in_ssa_p (cfun)
318 && (TREE_CODE (t) == VAR_DECL
319 || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
321 get_var_ann (t);
322 add_referenced_var (t);
324 return t;
327 if (id->do_not_unshare)
328 return *n;
329 else
330 return unshare_expr (*n);
333 static tree
334 remap_type_1 (tree type, copy_body_data *id)
336 tree new_tree, t;
 338   /* We do need a copy.  Build and register it now.  If this is a pointer or
339 reference type, remap the designated type and make a new pointer or
340 reference type. */
341 if (TREE_CODE (type) == POINTER_TYPE)
343 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
344 TYPE_MODE (type),
345 TYPE_REF_CAN_ALIAS_ALL (type));
346 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
347 new_tree = build_type_attribute_qual_variant (new_tree,
348 TYPE_ATTRIBUTES (type),
349 TYPE_QUALS (type));
350 insert_decl_map (id, type, new_tree);
351 return new_tree;
353 else if (TREE_CODE (type) == REFERENCE_TYPE)
355 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
356 TYPE_MODE (type),
357 TYPE_REF_CAN_ALIAS_ALL (type));
358 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
359 new_tree = build_type_attribute_qual_variant (new_tree,
360 TYPE_ATTRIBUTES (type),
361 TYPE_QUALS (type));
362 insert_decl_map (id, type, new_tree);
363 return new_tree;
365 else
366 new_tree = copy_node (type);
368 insert_decl_map (id, type, new_tree);
370 /* This is a new type, not a copy of an old type. Need to reassociate
371 variants. We can handle everything except the main variant lazily. */
372 t = TYPE_MAIN_VARIANT (type);
373 if (type != t)
375 t = remap_type (t, id);
376 TYPE_MAIN_VARIANT (new_tree) = t;
377 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
378 TYPE_NEXT_VARIANT (t) = new_tree;
380 else
382 TYPE_MAIN_VARIANT (new_tree) = new_tree;
383 TYPE_NEXT_VARIANT (new_tree) = NULL;
386 if (TYPE_STUB_DECL (type))
387 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
389 /* Lazily create pointer and reference types. */
390 TYPE_POINTER_TO (new_tree) = NULL;
391 TYPE_REFERENCE_TO (new_tree) = NULL;
393 switch (TREE_CODE (new_tree))
395 case INTEGER_TYPE:
396 case REAL_TYPE:
397 case FIXED_POINT_TYPE:
398 case ENUMERAL_TYPE:
399 case BOOLEAN_TYPE:
400 t = TYPE_MIN_VALUE (new_tree);
401 if (t && TREE_CODE (t) != INTEGER_CST)
402 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
404 t = TYPE_MAX_VALUE (new_tree);
405 if (t && TREE_CODE (t) != INTEGER_CST)
406 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
407 return new_tree;
409 case FUNCTION_TYPE:
410 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
411 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
412 return new_tree;
414 case ARRAY_TYPE:
415 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
416 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
417 break;
419 case RECORD_TYPE:
420 case UNION_TYPE:
421 case QUAL_UNION_TYPE:
423 tree f, nf = NULL;
425 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
427 t = remap_decl (f, id);
428 DECL_CONTEXT (t) = new_tree;
429 DECL_CHAIN (t) = nf;
430 nf = t;
432 TYPE_FIELDS (new_tree) = nreverse (nf);
434 break;
436 case OFFSET_TYPE:
437 default:
438 /* Shouldn't have been thought variable sized. */
439 gcc_unreachable ();
442 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
443 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
445 return new_tree;
448 tree
449 remap_type (tree type, copy_body_data *id)
451 tree *node;
452 tree tmp;
454 if (type == NULL)
455 return type;
457 /* See if we have remapped this type. */
458 node = (tree *) pointer_map_contains (id->decl_map, type);
459 if (node)
460 return *node;
462 /* The type only needs remapping if it's variably modified. */
463 if (! variably_modified_type_p (type, id->src_fn))
465 insert_decl_map (id, type, type);
466 return type;
469 id->remapping_type_depth++;
470 tmp = remap_type_1 (type, id);
471 id->remapping_type_depth--;
473 return tmp;
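/* Editor's hedged illustration of when remapping is actually needed: only
   variably modified types are rewritten, e.g. the array type in

       void f (int n)
       {
         int a[n];            // TYPE_SIZE refers to f's PARM_DECL n
       }

   When f is inlined, the bound n inside the array type must be expressed in
   terms of the caller's copy of n, which remap_type_1 does by walking
   TYPE_SIZE, TYPE_SIZE_UNIT and the domain bounds.  Ordinary types map to
   themselves via the identity entry recorded above.  */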
476 /* Return previously remapped type of TYPE in ID. Return NULL if TYPE
477 is NULL or TYPE has not been remapped before. */
479 static tree
480 remapped_type (tree type, copy_body_data *id)
482 tree *node;
484 if (type == NULL)
485 return type;
487 /* See if we have remapped this type. */
488 node = (tree *) pointer_map_contains (id->decl_map, type);
489 if (node)
490 return *node;
491 else
492 return NULL;
 496 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
498 static bool
499 can_be_nonlocal (tree decl, copy_body_data *id)
 501   /* We cannot duplicate function decls.  */
502 if (TREE_CODE (decl) == FUNCTION_DECL)
503 return true;
505 /* Local static vars must be non-local or we get multiple declaration
506 problems. */
507 if (TREE_CODE (decl) == VAR_DECL
508 && !auto_var_in_fn_p (decl, id->src_fn))
509 return true;
511 /* At the moment dwarf2out can handle only these types of nodes. We
512 can support more later. */
513 if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
514 return false;
516 /* We must use global type. We call remapped_type instead of
517 remap_type since we don't want to remap this type here if it
518 hasn't been remapped before. */
519 if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
520 return false;
 522   /* Without SSA we can't tell if a variable is used.  */
523 if (!gimple_in_ssa_p (cfun))
524 return false;
526 /* Live variables must be copied so we can attach DECL_RTL. */
527 if (var_ann (decl))
528 return false;
530 return true;
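/* Editor's hedged examples for the checks above: a FUNCTION_DECL (e.g. a
   nested function) or a function-local

       static int counter;

   must not be duplicated per inline copy, since that would create multiple
   definitions, so such decls stay in BLOCK_NONLOCALIZED_VARS.  An automatic
   variable whose type had to be remapped, or one that is live (has an
   annotation and will need DECL_RTL), still gets its own copy.  */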
533 static tree
534 remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
536 tree old_var;
537 tree new_decls = NULL_TREE;
539 /* Remap its variables. */
540 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
542 tree new_var;
544 if (can_be_nonlocal (old_var, id))
546 if (TREE_CODE (old_var) == VAR_DECL
547 && ! DECL_EXTERNAL (old_var)
548 && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
549 add_local_decl (cfun, old_var);
550 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
551 && !DECL_IGNORED_P (old_var)
552 && nonlocalized_list)
553 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
554 continue;
557 /* Remap the variable. */
558 new_var = remap_decl (old_var, id);
560 /* If we didn't remap this variable, we can't mess with its
561 TREE_CHAIN. If we remapped this variable to the return slot, it's
562 already declared somewhere else, so don't declare it here. */
564 if (new_var == id->retvar)
566 else if (!new_var)
568 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
569 && !DECL_IGNORED_P (old_var)
570 && nonlocalized_list)
571 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
573 else
575 gcc_assert (DECL_P (new_var));
576 DECL_CHAIN (new_var) = new_decls;
577 new_decls = new_var;
579 /* Also copy value-expressions. */
580 if (TREE_CODE (new_var) == VAR_DECL
581 && DECL_HAS_VALUE_EXPR_P (new_var))
583 tree tem = DECL_VALUE_EXPR (new_var);
584 bool old_regimplify = id->regimplify;
585 id->remapping_type_depth++;
586 walk_tree (&tem, copy_tree_body_r, id, NULL);
587 id->remapping_type_depth--;
588 id->regimplify = old_regimplify;
589 SET_DECL_VALUE_EXPR (new_var, tem);
594 return nreverse (new_decls);
597 /* Copy the BLOCK to contain remapped versions of the variables
598 therein. And hook the new block into the block-tree. */
600 static void
601 remap_block (tree *block, copy_body_data *id)
603 tree old_block;
604 tree new_block;
606 /* Make the new block. */
607 old_block = *block;
608 new_block = make_node (BLOCK);
609 TREE_USED (new_block) = TREE_USED (old_block);
610 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
611 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
612 BLOCK_NONLOCALIZED_VARS (new_block)
613 = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
614 *block = new_block;
616 /* Remap its variables. */
617 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
618 &BLOCK_NONLOCALIZED_VARS (new_block),
619 id);
621 if (id->transform_lang_insert_block)
622 id->transform_lang_insert_block (new_block);
624 /* Remember the remapped block. */
625 insert_decl_map (id, old_block, new_block);
628 /* Copy the whole block tree and root it in id->block. */
629 static tree
630 remap_blocks (tree block, copy_body_data *id)
632 tree t;
633 tree new_tree = block;
635 if (!block)
636 return NULL;
638 remap_block (&new_tree, id);
639 gcc_assert (new_tree != block);
640 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
641 prepend_lexical_block (new_tree, remap_blocks (t, id));
642 /* Blocks are in arbitrary order, but make things slightly prettier and do
643 not swap order when producing a copy. */
644 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
645 return new_tree;
648 static void
649 copy_statement_list (tree *tp)
651 tree_stmt_iterator oi, ni;
652 tree new_tree;
654 new_tree = alloc_stmt_list ();
655 ni = tsi_start (new_tree);
656 oi = tsi_start (*tp);
657 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
658 *tp = new_tree;
660 for (; !tsi_end_p (oi); tsi_next (&oi))
662 tree stmt = tsi_stmt (oi);
663 if (TREE_CODE (stmt) == STATEMENT_LIST)
664 copy_statement_list (&stmt);
665 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
669 static void
670 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
672 tree block = BIND_EXPR_BLOCK (*tp);
673 /* Copy (and replace) the statement. */
674 copy_tree_r (tp, walk_subtrees, NULL);
675 if (block)
677 remap_block (&block, id);
678 BIND_EXPR_BLOCK (*tp) = block;
681 if (BIND_EXPR_VARS (*tp))
682 /* This will remap a lot of the same decls again, but this should be
683 harmless. */
684 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
688 /* Create a new gimple_seq by remapping all the statements in BODY
689 using the inlining information in ID. */
691 static gimple_seq
692 remap_gimple_seq (gimple_seq body, copy_body_data *id)
694 gimple_stmt_iterator si;
695 gimple_seq new_body = NULL;
697 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
699 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
700 gimple_seq_add_stmt (&new_body, new_stmt);
703 return new_body;
707 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
708 block using the mapping information in ID. */
710 static gimple
711 copy_gimple_bind (gimple stmt, copy_body_data *id)
713 gimple new_bind;
714 tree new_block, new_vars;
715 gimple_seq body, new_body;
717 /* Copy the statement. Note that we purposely don't use copy_stmt
718 here because we need to remap statements as we copy. */
719 body = gimple_bind_body (stmt);
720 new_body = remap_gimple_seq (body, id);
722 new_block = gimple_bind_block (stmt);
723 if (new_block)
724 remap_block (&new_block, id);
726 /* This will remap a lot of the same decls again, but this should be
727 harmless. */
728 new_vars = gimple_bind_vars (stmt);
729 if (new_vars)
730 new_vars = remap_decls (new_vars, NULL, id);
732 new_bind = gimple_build_bind (new_vars, new_body, new_block);
734 return new_bind;
738 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
739 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
 740    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
741 recursing into the children nodes of *TP. */
743 static tree
744 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
746 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
747 copy_body_data *id = (copy_body_data *) wi_p->info;
748 tree fn = id->src_fn;
750 if (TREE_CODE (*tp) == SSA_NAME)
752 *tp = remap_ssa_name (*tp, id);
753 *walk_subtrees = 0;
754 return NULL;
756 else if (auto_var_in_fn_p (*tp, fn))
758 /* Local variables and labels need to be replaced by equivalent
759 variables. We don't want to copy static variables; there's
760 only one of those, no matter how many times we inline the
761 containing function. Similarly for globals from an outer
762 function. */
763 tree new_decl;
765 /* Remap the declaration. */
766 new_decl = remap_decl (*tp, id);
767 gcc_assert (new_decl);
768 /* Replace this variable with the copy. */
769 STRIP_TYPE_NOPS (new_decl);
770 /* ??? The C++ frontend uses void * pointer zero to initialize
771 any other type. This confuses the middle-end type verification.
772 As cloned bodies do not go through gimplification again the fixup
773 there doesn't trigger. */
774 if (TREE_CODE (new_decl) == INTEGER_CST
775 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
776 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
777 *tp = new_decl;
778 *walk_subtrees = 0;
780 else if (TREE_CODE (*tp) == STATEMENT_LIST)
781 gcc_unreachable ();
782 else if (TREE_CODE (*tp) == SAVE_EXPR)
783 gcc_unreachable ();
784 else if (TREE_CODE (*tp) == LABEL_DECL
785 && (!DECL_CONTEXT (*tp)
786 || decl_function_context (*tp) == id->src_fn))
787 /* These may need to be remapped for EH handling. */
788 *tp = remap_decl (*tp, id);
789 else if (TYPE_P (*tp))
790 /* Types may need remapping as well. */
791 *tp = remap_type (*tp, id);
792 else if (CONSTANT_CLASS_P (*tp))
794 /* If this is a constant, we have to copy the node iff the type
795 will be remapped. copy_tree_r will not copy a constant. */
796 tree new_type = remap_type (TREE_TYPE (*tp), id);
798 if (new_type == TREE_TYPE (*tp))
799 *walk_subtrees = 0;
801 else if (TREE_CODE (*tp) == INTEGER_CST)
802 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
803 TREE_INT_CST_HIGH (*tp));
804 else
806 *tp = copy_node (*tp);
807 TREE_TYPE (*tp) = new_type;
810 else
812 /* Otherwise, just copy the node. Note that copy_tree_r already
813 knows not to copy VAR_DECLs, etc., so this is safe. */
814 if (TREE_CODE (*tp) == MEM_REF)
816 /* We need to re-canonicalize MEM_REFs from inline substitutions
817 that can happen when a pointer argument is an ADDR_EXPR. */
818 tree decl = TREE_OPERAND (*tp, 0);
819 tree *n;
821 /* See remap_ssa_name. */
822 if (TREE_CODE (decl) == SSA_NAME
823 && TREE_CODE (SSA_NAME_VAR (decl)) == RESULT_DECL
824 && id->transform_return_to_modify)
825 decl = SSA_NAME_VAR (decl);
827 n = (tree *) pointer_map_contains (id->decl_map, decl);
828 if (n)
830 tree old = *tp;
831 tree ptr = unshare_expr (*n);
832 tree tem;
833 if ((tem = maybe_fold_offset_to_reference (EXPR_LOCATION (*tp),
834 ptr,
835 TREE_OPERAND (*tp, 1),
836 TREE_TYPE (*tp)))
837 && TREE_THIS_VOLATILE (tem) == TREE_THIS_VOLATILE (old))
839 tree *tem_basep = &tem;
840 while (handled_component_p (*tem_basep))
841 tem_basep = &TREE_OPERAND (*tem_basep, 0);
842 if (TREE_CODE (*tem_basep) == MEM_REF)
843 *tem_basep
844 = build2 (MEM_REF, TREE_TYPE (*tem_basep),
845 TREE_OPERAND (*tem_basep, 0),
846 fold_convert (TREE_TYPE (TREE_OPERAND (*tp, 1)),
847 TREE_OPERAND (*tem_basep, 1)));
848 else
849 *tem_basep
850 = build2 (MEM_REF, TREE_TYPE (*tem_basep),
851 build_fold_addr_expr (*tem_basep),
852 build_int_cst
853 (TREE_TYPE (TREE_OPERAND (*tp, 1)), 0));
854 *tp = tem;
856 else
858 *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
859 ptr, TREE_OPERAND (*tp, 1));
860 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
861 TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
863 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
864 *walk_subtrees = 0;
865 return NULL;
869 /* Here is the "usual case". Copy this tree node, and then
870 tweak some special cases. */
871 copy_tree_r (tp, walk_subtrees, NULL);
 873       /* Global variables we haven't seen yet need to go into referenced
 874          vars, unless they are referenced only from types.  */
875 if (gimple_in_ssa_p (cfun)
876 && TREE_CODE (*tp) == VAR_DECL
877 && id->remapping_type_depth == 0
878 && !processing_debug_stmt)
879 add_referenced_var (*tp);
881 /* We should never have TREE_BLOCK set on non-statements. */
882 if (EXPR_P (*tp))
883 gcc_assert (!TREE_BLOCK (*tp));
885 if (TREE_CODE (*tp) != OMP_CLAUSE)
886 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
888 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
890 /* The copied TARGET_EXPR has never been expanded, even if the
891 original node was expanded already. */
892 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
893 TREE_OPERAND (*tp, 3) = NULL_TREE;
895 else if (TREE_CODE (*tp) == ADDR_EXPR)
897 /* Variable substitution need not be simple. In particular,
898 the MEM_REF substitution above. Make sure that
899 TREE_CONSTANT and friends are up-to-date. But make sure
900 to not improperly set TREE_BLOCK on some sub-expressions. */
901 int invariant = is_gimple_min_invariant (*tp);
902 tree block = id->block;
903 id->block = NULL_TREE;
904 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
905 id->block = block;
906 recompute_tree_invariant_for_addr_expr (*tp);
908 /* If this used to be invariant, but is not any longer,
909 then regimplification is probably needed. */
910 if (invariant && !is_gimple_min_invariant (*tp))
911 id->regimplify = true;
913 *walk_subtrees = 0;
917 /* Keep iterating. */
918 return NULL_TREE;
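/* Editor's hedged before/after sketch of the MEM_REF re-canonicalization
   handled above: if the caller passes &s.f for a pointer parameter p, the
   inlined body's

       *p                     // a MEM_REF based on p

   has p replaced by &s.f during remapping and is folded back into the
   direct reference

       s.f

   rather than leaving a *&s.f around, keeping the IL canonical.  */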
922 /* Called from copy_body_id via walk_tree. DATA is really a
923 `copy_body_data *'. */
925 tree
926 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
928 copy_body_data *id = (copy_body_data *) data;
929 tree fn = id->src_fn;
930 tree new_block;
932 /* Begin by recognizing trees that we'll completely rewrite for the
933 inlining context. Our output for these trees is completely
 934      different from our input (e.g. RETURN_EXPR is deleted, and morphs
935 into an edge). Further down, we'll handle trees that get
936 duplicated and/or tweaked. */
938 /* When requested, RETURN_EXPRs should be transformed to just the
939 contained MODIFY_EXPR. The branch semantics of the return will
940 be handled elsewhere by manipulating the CFG rather than a statement. */
941 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
943 tree assignment = TREE_OPERAND (*tp, 0);
945 /* If we're returning something, just turn that into an
946 assignment into the equivalent of the original RESULT_DECL.
947 If the "assignment" is just the result decl, the result
948 decl has already been set (e.g. a recent "foo (&result_decl,
949 ...)"); just toss the entire RETURN_EXPR. */
950 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
952 /* Replace the RETURN_EXPR with (a copy of) the
953 MODIFY_EXPR hanging underneath. */
954 *tp = copy_node (assignment);
956 else /* Else the RETURN_EXPR returns no value. */
958 *tp = NULL;
959 return (tree) (void *)1;
962 else if (TREE_CODE (*tp) == SSA_NAME)
964 *tp = remap_ssa_name (*tp, id);
965 *walk_subtrees = 0;
966 return NULL;
969 /* Local variables and labels need to be replaced by equivalent
970 variables. We don't want to copy static variables; there's only
971 one of those, no matter how many times we inline the containing
972 function. Similarly for globals from an outer function. */
973 else if (auto_var_in_fn_p (*tp, fn))
975 tree new_decl;
977 /* Remap the declaration. */
978 new_decl = remap_decl (*tp, id);
979 gcc_assert (new_decl);
980 /* Replace this variable with the copy. */
981 STRIP_TYPE_NOPS (new_decl);
982 *tp = new_decl;
983 *walk_subtrees = 0;
985 else if (TREE_CODE (*tp) == STATEMENT_LIST)
986 copy_statement_list (tp);
987 else if (TREE_CODE (*tp) == SAVE_EXPR
988 || TREE_CODE (*tp) == TARGET_EXPR)
989 remap_save_expr (tp, id->decl_map, walk_subtrees);
990 else if (TREE_CODE (*tp) == LABEL_DECL
991 && (! DECL_CONTEXT (*tp)
992 || decl_function_context (*tp) == id->src_fn))
993 /* These may need to be remapped for EH handling. */
994 *tp = remap_decl (*tp, id);
995 else if (TREE_CODE (*tp) == BIND_EXPR)
996 copy_bind_expr (tp, walk_subtrees, id);
997 /* Types may need remapping as well. */
998 else if (TYPE_P (*tp))
999 *tp = remap_type (*tp, id);
1001 /* If this is a constant, we have to copy the node iff the type will be
1002 remapped. copy_tree_r will not copy a constant. */
1003 else if (CONSTANT_CLASS_P (*tp))
1005 tree new_type = remap_type (TREE_TYPE (*tp), id);
1007 if (new_type == TREE_TYPE (*tp))
1008 *walk_subtrees = 0;
1010 else if (TREE_CODE (*tp) == INTEGER_CST)
1011 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
1012 TREE_INT_CST_HIGH (*tp));
1013 else
1015 *tp = copy_node (*tp);
1016 TREE_TYPE (*tp) = new_type;
1020 /* Otherwise, just copy the node. Note that copy_tree_r already
1021 knows not to copy VAR_DECLs, etc., so this is safe. */
1022 else
1024 /* Here we handle trees that are not completely rewritten.
1025 First we detect some inlining-induced bogosities for
1026 discarding. */
1027 if (TREE_CODE (*tp) == MODIFY_EXPR
1028 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1029 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1031 /* Some assignments VAR = VAR; don't generate any rtl code
1032 and thus don't count as variable modification. Avoid
1033 keeping bogosities like 0 = 0. */
1034 tree decl = TREE_OPERAND (*tp, 0), value;
1035 tree *n;
1037 n = (tree *) pointer_map_contains (id->decl_map, decl);
1038 if (n)
1040 value = *n;
1041 STRIP_TYPE_NOPS (value);
1042 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1044 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1045 return copy_tree_body_r (tp, walk_subtrees, data);
1049 else if (TREE_CODE (*tp) == INDIRECT_REF)
1051 /* Get rid of *& from inline substitutions that can happen when a
1052 pointer argument is an ADDR_EXPR. */
1053 tree decl = TREE_OPERAND (*tp, 0);
1054 tree *n;
1056 n = (tree *) pointer_map_contains (id->decl_map, decl);
1057 if (n)
1059 tree new_tree;
1060 tree old;
1061 /* If we happen to get an ADDR_EXPR in n->value, strip
1062 it manually here as we'll eventually get ADDR_EXPRs
1063 which lie about their types pointed to. In this case
1064 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1065 but we absolutely rely on that. As fold_indirect_ref
1066 does other useful transformations, try that first, though. */
1067 tree type = TREE_TYPE (TREE_TYPE (*n));
1068 if (id->do_not_unshare)
1069 new_tree = *n;
1070 else
1071 new_tree = unshare_expr (*n);
1072 old = *tp;
1073 *tp = gimple_fold_indirect_ref (new_tree);
1074 if (! *tp)
1076 if (TREE_CODE (new_tree) == ADDR_EXPR)
1078 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
1079 type, new_tree);
1080 /* ??? We should either assert here or build
1081 a VIEW_CONVERT_EXPR instead of blindly leaking
1082 incompatible types to our IL. */
1083 if (! *tp)
1084 *tp = TREE_OPERAND (new_tree, 0);
1086 else
1088 *tp = build1 (INDIRECT_REF, type, new_tree);
1089 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1090 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1091 TREE_READONLY (*tp) = TREE_READONLY (old);
1092 TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
1095 *walk_subtrees = 0;
1096 return NULL;
1099 else if (TREE_CODE (*tp) == MEM_REF)
1101 /* We need to re-canonicalize MEM_REFs from inline substitutions
1102 that can happen when a pointer argument is an ADDR_EXPR. */
1103 tree decl = TREE_OPERAND (*tp, 0);
1104 tree *n;
1106 n = (tree *) pointer_map_contains (id->decl_map, decl);
1107 if (n)
1109 tree old = *tp;
1110 *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
1111 unshare_expr (*n), TREE_OPERAND (*tp, 1));
1112 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1113 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1114 *walk_subtrees = 0;
1115 return NULL;
1119 /* Here is the "usual case". Copy this tree node, and then
1120 tweak some special cases. */
1121 copy_tree_r (tp, walk_subtrees, NULL);
 1123       /* Global variables we haven't seen yet need to go into referenced
 1124          vars, unless they are referenced only from types or debug stmts.  */
1125 if (gimple_in_ssa_p (cfun)
1126 && TREE_CODE (*tp) == VAR_DECL
1127 && id->remapping_type_depth == 0
1128 && !processing_debug_stmt)
1129 add_referenced_var (*tp);
 1131       /* If EXPR has a block defined, map it to the newly constructed block.
 1132          When inlining we want EXPRs without a block to appear in the block
 1133          of the function call if we are not remapping a type.  */
1134 if (EXPR_P (*tp))
1136 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1137 if (TREE_BLOCK (*tp))
1139 tree *n;
1140 n = (tree *) pointer_map_contains (id->decl_map,
1141 TREE_BLOCK (*tp));
1142 gcc_assert (n || id->remapping_type_depth != 0);
1143 if (n)
1144 new_block = *n;
1146 TREE_BLOCK (*tp) = new_block;
1149 if (TREE_CODE (*tp) != OMP_CLAUSE)
1150 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1152 /* The copied TARGET_EXPR has never been expanded, even if the
1153 original node was expanded already. */
1154 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1156 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1157 TREE_OPERAND (*tp, 3) = NULL_TREE;
1160 /* Variable substitution need not be simple. In particular, the
1161 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1162 and friends are up-to-date. */
1163 else if (TREE_CODE (*tp) == ADDR_EXPR)
1165 int invariant = is_gimple_min_invariant (*tp);
1166 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1168 /* Handle the case where we substituted an INDIRECT_REF
1169 into the operand of the ADDR_EXPR. */
1170 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1171 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1172 else
1173 recompute_tree_invariant_for_addr_expr (*tp);
1175 /* If this used to be invariant, but is not any longer,
1176 then regimplification is probably needed. */
1177 if (invariant && !is_gimple_min_invariant (*tp))
1178 id->regimplify = true;
1180 *walk_subtrees = 0;
1184 /* Keep iterating. */
1185 return NULL_TREE;
1188 /* Helper for remap_gimple_stmt. Given an EH region number for the
1189 source function, map that to the duplicate EH region number in
1190 the destination function. */
1192 static int
1193 remap_eh_region_nr (int old_nr, copy_body_data *id)
1195 eh_region old_r, new_r;
1196 void **slot;
1198 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1199 slot = pointer_map_contains (id->eh_map, old_r);
1200 new_r = (eh_region) *slot;
1202 return new_r->index;
1205 /* Similar, but operate on INTEGER_CSTs. */
1207 static tree
1208 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1210 int old_nr, new_nr;
1212 old_nr = tree_low_cst (old_t_nr, 0);
1213 new_nr = remap_eh_region_nr (old_nr, id);
1215 return build_int_cst (NULL, new_nr);
1218 /* Helper for copy_bb. Remap statement STMT using the inlining
1219 information in ID. Return the new statement copy. */
1221 static gimple
1222 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1224 gimple copy = NULL;
1225 struct walk_stmt_info wi;
1226 tree new_block;
1227 bool skip_first = false;
1229 /* Begin by recognizing trees that we'll completely rewrite for the
1230 inlining context. Our output for these trees is completely
 1231      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1232 into an edge). Further down, we'll handle trees that get
1233 duplicated and/or tweaked. */
1235 /* When requested, GIMPLE_RETURNs should be transformed to just the
1236 contained GIMPLE_ASSIGN. The branch semantics of the return will
1237 be handled elsewhere by manipulating the CFG rather than the
1238 statement. */
1239 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1241 tree retval = gimple_return_retval (stmt);
1243 /* If we're returning something, just turn that into an
1244 assignment into the equivalent of the original RESULT_DECL.
1245 If RETVAL is just the result decl, the result decl has
1246 already been set (e.g. a recent "foo (&result_decl, ...)");
1247 just toss the entire GIMPLE_RETURN. */
1248 if (retval
1249 && (TREE_CODE (retval) != RESULT_DECL
1250 && (TREE_CODE (retval) != SSA_NAME
1251 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1253 copy = gimple_build_assign (id->retvar, retval);
1254 /* id->retvar is already substituted. Skip it on later remapping. */
1255 skip_first = true;
1257 else
1258 return gimple_build_nop ();
1260 else if (gimple_has_substatements (stmt))
1262 gimple_seq s1, s2;
1264 /* When cloning bodies from the C++ front end, we will be handed bodies
1265 in High GIMPLE form. Handle here all the High GIMPLE statements that
1266 have embedded statements. */
1267 switch (gimple_code (stmt))
1269 case GIMPLE_BIND:
1270 copy = copy_gimple_bind (stmt, id);
1271 break;
1273 case GIMPLE_CATCH:
1274 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1275 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1276 break;
1278 case GIMPLE_EH_FILTER:
1279 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1280 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1281 break;
1283 case GIMPLE_TRY:
1284 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1285 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1286 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1287 break;
1289 case GIMPLE_WITH_CLEANUP_EXPR:
1290 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1291 copy = gimple_build_wce (s1);
1292 break;
1294 case GIMPLE_OMP_PARALLEL:
1295 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1296 copy = gimple_build_omp_parallel
1297 (s1,
1298 gimple_omp_parallel_clauses (stmt),
1299 gimple_omp_parallel_child_fn (stmt),
1300 gimple_omp_parallel_data_arg (stmt));
1301 break;
1303 case GIMPLE_OMP_TASK:
1304 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1305 copy = gimple_build_omp_task
1306 (s1,
1307 gimple_omp_task_clauses (stmt),
1308 gimple_omp_task_child_fn (stmt),
1309 gimple_omp_task_data_arg (stmt),
1310 gimple_omp_task_copy_fn (stmt),
1311 gimple_omp_task_arg_size (stmt),
1312 gimple_omp_task_arg_align (stmt));
1313 break;
1315 case GIMPLE_OMP_FOR:
1316 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1317 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1318 copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
1319 gimple_omp_for_collapse (stmt), s2);
1321 size_t i;
1322 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1324 gimple_omp_for_set_index (copy, i,
1325 gimple_omp_for_index (stmt, i));
1326 gimple_omp_for_set_initial (copy, i,
1327 gimple_omp_for_initial (stmt, i));
1328 gimple_omp_for_set_final (copy, i,
1329 gimple_omp_for_final (stmt, i));
1330 gimple_omp_for_set_incr (copy, i,
1331 gimple_omp_for_incr (stmt, i));
1332 gimple_omp_for_set_cond (copy, i,
1333 gimple_omp_for_cond (stmt, i));
1336 break;
1338 case GIMPLE_OMP_MASTER:
1339 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1340 copy = gimple_build_omp_master (s1);
1341 break;
1343 case GIMPLE_OMP_ORDERED:
1344 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1345 copy = gimple_build_omp_ordered (s1);
1346 break;
1348 case GIMPLE_OMP_SECTION:
1349 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1350 copy = gimple_build_omp_section (s1);
1351 break;
1353 case GIMPLE_OMP_SECTIONS:
1354 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1355 copy = gimple_build_omp_sections
1356 (s1, gimple_omp_sections_clauses (stmt));
1357 break;
1359 case GIMPLE_OMP_SINGLE:
1360 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1361 copy = gimple_build_omp_single
1362 (s1, gimple_omp_single_clauses (stmt));
1363 break;
1365 case GIMPLE_OMP_CRITICAL:
1366 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1367 copy
1368 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1369 break;
1371 default:
1372 gcc_unreachable ();
1375 else
1377 if (gimple_assign_copy_p (stmt)
1378 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1379 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1381 /* Here we handle statements that are not completely rewritten.
1382 First we detect some inlining-induced bogosities for
1383 discarding. */
1385 /* Some assignments VAR = VAR; don't generate any rtl code
1386 and thus don't count as variable modification. Avoid
1387 keeping bogosities like 0 = 0. */
1388 tree decl = gimple_assign_lhs (stmt), value;
1389 tree *n;
1391 n = (tree *) pointer_map_contains (id->decl_map, decl);
1392 if (n)
1394 value = *n;
1395 STRIP_TYPE_NOPS (value);
1396 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1397 return gimple_build_nop ();
1401 if (gimple_debug_bind_p (stmt))
1403 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1404 gimple_debug_bind_get_value (stmt),
1405 stmt);
1406 VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1407 return copy;
1410 /* Create a new deep copy of the statement. */
1411 copy = gimple_copy (stmt);
1413 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1414 RESX and EH_DISPATCH. */
1415 if (id->eh_map)
1416 switch (gimple_code (copy))
1418 case GIMPLE_CALL:
1420 tree r, fndecl = gimple_call_fndecl (copy);
1421 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1422 switch (DECL_FUNCTION_CODE (fndecl))
1424 case BUILT_IN_EH_COPY_VALUES:
1425 r = gimple_call_arg (copy, 1);
1426 r = remap_eh_region_tree_nr (r, id);
1427 gimple_call_set_arg (copy, 1, r);
1428 /* FALLTHRU */
1430 case BUILT_IN_EH_POINTER:
1431 case BUILT_IN_EH_FILTER:
1432 r = gimple_call_arg (copy, 0);
1433 r = remap_eh_region_tree_nr (r, id);
1434 gimple_call_set_arg (copy, 0, r);
1435 break;
1437 default:
1438 break;
1441 /* Reset alias info if we didn't apply measures to
1442 keep it valid over inlining by setting DECL_PT_UID. */
1443 if (!id->src_cfun->gimple_df
1444 || !id->src_cfun->gimple_df->ipa_pta)
1445 gimple_call_reset_alias_info (copy);
1447 break;
1449 case GIMPLE_RESX:
1451 int r = gimple_resx_region (copy);
1452 r = remap_eh_region_nr (r, id);
1453 gimple_resx_set_region (copy, r);
1455 break;
1457 case GIMPLE_EH_DISPATCH:
1459 int r = gimple_eh_dispatch_region (copy);
1460 r = remap_eh_region_nr (r, id);
1461 gimple_eh_dispatch_set_region (copy, r);
1463 break;
1465 default:
1466 break;
1470 /* If STMT has a block defined, map it to the newly constructed
1471 block. When inlining we want statements without a block to
1472 appear in the block of the function call. */
1473 new_block = id->block;
1474 if (gimple_block (copy))
1476 tree *n;
1477 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1478 gcc_assert (n);
1479 new_block = *n;
1482 gimple_set_block (copy, new_block);
1484 if (gimple_debug_bind_p (copy))
1485 return copy;
1487 /* Remap all the operands in COPY. */
1488 memset (&wi, 0, sizeof (wi));
1489 wi.info = id;
1490 if (skip_first)
1491 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1492 else
1493 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1495 /* Clear the copied virtual operands. We are not remapping them here
1496 but are going to recreate them from scratch. */
1497 if (gimple_has_mem_ops (copy))
1499 gimple_set_vdef (copy, NULL_TREE);
1500 gimple_set_vuse (copy, NULL_TREE);
1503 return copy;
1507 /* Copy basic block, scale profile accordingly. Edges will be taken care of
 1508    later.  */
1510 static basic_block
1511 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1512 gcov_type count_scale)
1514 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1515 basic_block copy_basic_block;
1516 tree decl;
1517 gcov_type freq;
1518 basic_block prev;
1520 /* Search for previous copied basic block. */
1521 prev = bb->prev_bb;
1522 while (!prev->aux)
1523 prev = prev->prev_bb;
1525 /* create_basic_block() will append every new block to
1526 basic_block_info automatically. */
1527 copy_basic_block = create_basic_block (NULL, (void *) 0,
1528 (basic_block) prev->aux);
1529 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
 1531   /* We are going to rebuild frequencies from scratch.  These values
 1532      are of only minor importance for driving canonicalize_loop_headers.  */
1533 freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);
1535 /* We recompute frequencies after inlining, so this is quite safe. */
1536 if (freq > BB_FREQ_MAX)
1537 freq = BB_FREQ_MAX;
1538 copy_basic_block->frequency = freq;
1540 copy_gsi = gsi_start_bb (copy_basic_block);
1542 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1544 gimple stmt = gsi_stmt (gsi);
1545 gimple orig_stmt = stmt;
1547 id->regimplify = false;
1548 stmt = remap_gimple_stmt (stmt, id);
1549 if (gimple_nop_p (stmt))
1550 continue;
1552 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1553 seq_gsi = copy_gsi;
1555 /* With return slot optimization we can end up with
1556 non-gimple (foo *)&this->m, fix that here. */
1557 if (is_gimple_assign (stmt)
1558 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1559 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1561 tree new_rhs;
1562 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1563 gimple_assign_rhs1 (stmt),
1564 true, NULL, false,
1565 GSI_CONTINUE_LINKING);
1566 gimple_assign_set_rhs1 (stmt, new_rhs);
1567 id->regimplify = false;
1570 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1572 if (id->regimplify)
1573 gimple_regimplify_operands (stmt, &seq_gsi);
1575 /* If copy_basic_block has been empty at the start of this iteration,
1576 call gsi_start_bb again to get at the newly added statements. */
1577 if (gsi_end_p (copy_gsi))
1578 copy_gsi = gsi_start_bb (copy_basic_block);
1579 else
1580 gsi_next (&copy_gsi);
1582 /* Process the new statement. The call to gimple_regimplify_operands
 1583          possibly turned the statement into multiple statements; we
1584 need to process all of them. */
1587 tree fn;
1589 stmt = gsi_stmt (copy_gsi);
1590 if (is_gimple_call (stmt)
1591 && gimple_call_va_arg_pack_p (stmt)
1592 && id->gimple_call)
1594 /* __builtin_va_arg_pack () should be replaced by
1595 all arguments corresponding to ... in the caller. */
1596 tree p;
1597 gimple new_call;
1598 VEC(tree, heap) *argarray;
1599 size_t nargs = gimple_call_num_args (id->gimple_call);
1600 size_t n;
1602 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1603 nargs--;
1605 /* Create the new array of arguments. */
1606 n = nargs + gimple_call_num_args (stmt);
1607 argarray = VEC_alloc (tree, heap, n);
1608 VEC_safe_grow (tree, heap, argarray, n);
1610 /* Copy all the arguments before '...' */
1611 memcpy (VEC_address (tree, argarray),
1612 gimple_call_arg_ptr (stmt, 0),
1613 gimple_call_num_args (stmt) * sizeof (tree));
1615 /* Append the arguments passed in '...' */
1616 memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1617 gimple_call_arg_ptr (id->gimple_call, 0)
1618 + (gimple_call_num_args (id->gimple_call) - nargs),
1619 nargs * sizeof (tree));
1621 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1622 argarray);
1624 VEC_free (tree, heap, argarray);
1626 /* Copy all GIMPLE_CALL flags, location and block, except
1627 GF_CALL_VA_ARG_PACK. */
1628 gimple_call_copy_flags (new_call, stmt);
1629 gimple_call_set_va_arg_pack (new_call, false);
1630 gimple_set_location (new_call, gimple_location (stmt));
1631 gimple_set_block (new_call, gimple_block (stmt));
1632 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1634 gsi_replace (&copy_gsi, new_call, false);
1635 stmt = new_call;
1637 else if (is_gimple_call (stmt)
1638 && id->gimple_call
1639 && (decl = gimple_call_fndecl (stmt))
1640 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1641 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1643 /* __builtin_va_arg_pack_len () should be replaced by
1644 the number of anonymous arguments. */
1645 size_t nargs = gimple_call_num_args (id->gimple_call);
1646 tree count, p;
1647 gimple new_stmt;
1649 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1650 nargs--;
1652 count = build_int_cst (integer_type_node, nargs);
1653 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1654 gsi_replace (&copy_gsi, new_stmt, false);
1655 stmt = new_stmt;
 1658       /* Statements produced by inlining can be unfolded, especially
 1659          when we have constant propagated some operands.  We can't fold
 1660          them right now for two reasons:
 1661          1) folding requires SSA_NAME_DEF_STMTs to be correct
 1662          2) we can't change function calls to builtins.
 1663          So we just mark the statement for later folding.  We mark
 1664          all new statements, instead of just the statements that have changed
 1665          by some nontrivial substitution, so even statements made
 1666          foldable indirectly are updated.  If this turns out to be
 1667          expensive, copy_body can be told to watch for nontrivial
 1668          changes.  */
1669 if (id->statements_to_fold)
1670 pointer_set_insert (id->statements_to_fold, stmt);
1672 /* We're duplicating a CALL_EXPR. Find any corresponding
1673 callgraph edges and update or duplicate them. */
1674 if (is_gimple_call (stmt))
1676 struct cgraph_edge *edge;
1677 int flags;
1679 switch (id->transform_call_graph_edges)
1681 case CB_CGE_DUPLICATE:
1682 edge = cgraph_edge (id->src_node, orig_stmt);
1683 if (edge)
1685 int edge_freq = edge->frequency;
1686 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1687 gimple_uid (stmt),
1688 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1689 edge->frequency, true);
1690 /* We could also just rescale the frequency, but
1691 doing so would introduce roundoff errors and make
 1692                  the verifier unhappy.  */
1693 edge->frequency
1694 = compute_call_stmt_bb_frequency (id->dst_node->decl,
1695 copy_basic_block);
1696 if (dump_file
1697 && profile_status_for_function (cfun) != PROFILE_ABSENT
1698 && (edge_freq > edge->frequency + 10
1699 || edge_freq < edge->frequency - 10))
1701 fprintf (dump_file, "Edge frequency estimated by "
1702 "cgraph %i diverge from inliner's estimate %i\n",
1703 edge_freq,
1704 edge->frequency);
1705 fprintf (dump_file,
1706 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1707 bb->index,
1708 bb->frequency,
1709 copy_basic_block->frequency);
1711 stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
1713 break;
1715 case CB_CGE_MOVE_CLONES:
1716 cgraph_set_call_stmt_including_clones (id->dst_node,
1717 orig_stmt, stmt);
1718 edge = cgraph_edge (id->dst_node, stmt);
1719 break;
1721 case CB_CGE_MOVE:
1722 edge = cgraph_edge (id->dst_node, orig_stmt);
1723 if (edge)
1724 cgraph_set_call_stmt (edge, stmt);
1725 break;
1727 default:
1728 gcc_unreachable ();
 1731           /* Constant propagation on arguments done during inlining
 1732              may create a new direct call.  Produce an edge for it.  */
1733 if ((!edge
1734 || (edge->indirect_inlining_edge
1735 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1736 && (fn = gimple_call_fndecl (stmt)) != NULL)
1738 struct cgraph_node *dest = cgraph_node (fn);
 1740               /* We have a missing edge in the callgraph.  This can happen
 1741                  when previous inlining turned an indirect call into a
 1742                  direct call by constant propagating arguments, or when we are
 1743                  producing a dead clone (for further cloning).  In all
 1744                  other cases we hit a bug (incorrect node sharing is the
 1745                  most common reason for missing edges).  */
1746 gcc_assert (dest->needed || !dest->analyzed
1747 || dest->address_taken
1748 || !id->src_node->analyzed
1749 || !id->dst_node->analyzed);
1750 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1751 cgraph_create_edge_including_clones
1752 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1753 compute_call_stmt_bb_frequency (id->dst_node->decl,
1754 copy_basic_block),
1755 bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
1756 else
1757 cgraph_create_edge (id->dst_node, dest, stmt,
1758 bb->count,
1759 compute_call_stmt_bb_frequency
1760 (id->dst_node->decl, copy_basic_block),
1761 bb->loop_depth)->inline_failed
1762 = CIF_ORIGINALLY_INDIRECT_CALL;
1763 if (dump_file)
1765 fprintf (dump_file, "Created new direct edge to %s\n",
1766 cgraph_node_name (dest));
1770 flags = gimple_call_flags (stmt);
1771 if (flags & ECF_MAY_BE_ALLOCA)
1772 cfun->calls_alloca = true;
1773 if (flags & ECF_RETURNS_TWICE)
1774 cfun->calls_setjmp = true;
1777 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1778 id->eh_map, id->eh_lp_nr);
1780 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1782 ssa_op_iter i;
1783 tree def;
1785 find_new_referenced_vars (gsi_stmt (copy_gsi));
1786 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1787 if (TREE_CODE (def) == SSA_NAME)
1788 SSA_NAME_DEF_STMT (def) = stmt;
1791 gsi_next (&copy_gsi);
1793 while (!gsi_end_p (copy_gsi));
1795 copy_gsi = gsi_last_bb (copy_basic_block);
1798 return copy_basic_block;
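/* Editor's hedged, self-contained example of the __builtin_va_arg_pack ()
   substitution performed in copy_bb above, kept under #if 0 so it is never
   compiled here.  my_printf and my_printf_chk are hypothetical names.  */
#if 0
extern int my_printf (const char *fmt, ...);

static inline int my_printf_chk (const char *fmt, ...)
  __attribute__ ((always_inline));

/* Always-inlined forwarder: when a call to my_printf_chk is inlined, the
   __builtin_va_arg_pack () argument is replaced by all the anonymous
   arguments of the caller, and __builtin_va_arg_pack_len () folds to their
   count.  */
static inline int
my_printf_chk (const char *fmt, ...)
{
  if (__builtin_va_arg_pack_len () == 0)
    return my_printf (fmt);
  return my_printf (fmt, __builtin_va_arg_pack ());
}
#endif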
 1801 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
 1802    form is quite easy, since the dominator relationship for the old basic
 1803    blocks does not change.
 1805    There is however an exception where inlining might change the dominator
 1806    relation across EH edges from basic blocks within inlined functions destined
 1807    for landing pads in the function we inline into.
 1809    The function fills in PHI_RESULTs of such PHI nodes if they refer
 1810    to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
 1811    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
 1812    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
 1813    set, and this means that there will be no overlapping live ranges
 1814    for the underlying symbol.
 1816    This might change in the future if we allow redirecting of EH edges and
 1817    we might then want to change the way we build the CFG pre-inlining to
 1818    include all the possible edges.  */
1819 static void
1820 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1821 bool can_throw, bool nonlocal_goto)
1823 edge e;
1824 edge_iterator ei;
1826 FOR_EACH_EDGE (e, ei, bb->succs)
1827 if (!e->dest->aux
1828 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1830 gimple phi;
1831 gimple_stmt_iterator si;
1833 if (!nonlocal_goto)
1834 gcc_assert (e->flags & EDGE_EH);
1836 if (!can_throw)
1837 gcc_assert (!(e->flags & EDGE_EH));
1839 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1841 edge re;
1843 phi = gsi_stmt (si);
1845 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1846 gcc_assert (!e->dest->aux);
1848 gcc_assert ((e->flags & EDGE_EH)
1849 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1851 if (!is_gimple_reg (PHI_RESULT (phi)))
1853 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
1854 continue;
1857 re = find_edge (ret_bb, e->dest);
1858 gcc_assert (re);
1859 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1860 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1862 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1863 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1869 /* Copy edges from BB into its copy constructed earlier, scale profile
1870 accordingly. Edges will be taken care of later. Assume the aux
1871 pointers point to the copies of each BB. Return true if any
1872 debug stmts are left after a statement that must end the basic block. */
1874 static bool
1875 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1877 basic_block new_bb = (basic_block) bb->aux;
1878 edge_iterator ei;
1879 edge old_edge;
1880 gimple_stmt_iterator si;
1881 int flags;
1882 bool need_debug_cleanup = false;
1884 /* Use the indices from the original blocks to create edges for the
1885 new ones. */
1886 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1887 if (!(old_edge->flags & EDGE_EH))
1889 edge new_edge;
1891 flags = old_edge->flags;
1893 /* Return edges do get a FALLTHRU flag when they get inlined. */
1894 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1895 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1896 flags |= EDGE_FALLTHRU;
1897 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1898 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1899 new_edge->probability = old_edge->probability;
1902 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1903 return false;
1905 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1907 gimple copy_stmt;
1908 bool can_throw, nonlocal_goto;
1910 copy_stmt = gsi_stmt (si);
1911 if (!is_gimple_debug (copy_stmt))
1913 update_stmt (copy_stmt);
1914 if (gimple_in_ssa_p (cfun))
1915 mark_symbols_for_renaming (copy_stmt);
1918 /* Do this before the possible split_block. */
1919 gsi_next (&si);
1921 /* If this tree could throw an exception, there are two
1922 cases where we need to add abnormal edge(s): the
1923 tree wasn't in a region and there is a "current
1924 region" in the caller; or the original tree had
1925 EH edges. In both cases split the block after the tree,
1926 and add abnormal edge(s) as needed; we need both
1927 those from the callee and the caller.
1928 We check whether the copy can throw, because the const
1929 propagation can change an INDIRECT_REF which throws
1930 into a COMPONENT_REF which doesn't. If the copy
1931 can throw, the original could also throw. */
1932 can_throw = stmt_can_throw_internal (copy_stmt);
1933 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1935 if (can_throw || nonlocal_goto)
1937 if (!gsi_end_p (si))
1939 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1940 gsi_next (&si);
1941 if (gsi_end_p (si))
1942 need_debug_cleanup = true;
1944 if (!gsi_end_p (si))
1945 /* Note that bb's predecessor edges aren't necessarily
1946 right at this point; split_block doesn't care. */
1948 edge e = split_block (new_bb, copy_stmt);
1950 new_bb = e->dest;
1951 new_bb->aux = e->src->aux;
1952 si = gsi_start_bb (new_bb);
1956 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1957 make_eh_dispatch_edges (copy_stmt);
1958 else if (can_throw)
1959 make_eh_edges (copy_stmt);
1961 if (nonlocal_goto)
1962 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1964 if ((can_throw || nonlocal_goto)
1965 && gimple_in_ssa_p (cfun))
1966 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1967 can_throw, nonlocal_goto);
1969 return need_debug_cleanup;
1972 /* Copy the PHIs. All blocks and edges have been copied; some blocks
1973 may have been split and new outgoing EH edges inserted.
1974 BB points to the block of the original function and AUX pointers link
1975 the original and newly copied blocks. */
1977 static void
1978 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1980 basic_block const new_bb = (basic_block) bb->aux;
1981 edge_iterator ei;
1982 gimple phi;
1983 gimple_stmt_iterator si;
1984 edge new_edge;
1985 bool inserted = false;
1987 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1989 tree res, new_res;
1990 gimple new_phi;
1992 phi = gsi_stmt (si);
1993 res = PHI_RESULT (phi);
1994 new_res = res;
1995 if (is_gimple_reg (res))
1997 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1998 SSA_NAME_DEF_STMT (new_res)
1999 = new_phi = create_phi_node (new_res, new_bb);
2000 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2002 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2003 tree arg;
2004 tree new_arg;
2005 tree block = id->block;
2006 edge_iterator ei2;
2008 /* When doing partial cloning, we allow PHIs on the entry block
2009 as long as all the arguments are the same. Find any input
2010 edge to see which argument to copy. */
2011 if (!old_edge)
2012 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2013 if (!old_edge->src->aux)
2014 break;
2016 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2017 new_arg = arg;
2018 id->block = NULL_TREE;
2019 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2020 id->block = block;
2021 gcc_assert (new_arg);
2022 /* With return slot optimization we can end up with
2023 non-gimple (foo *)&this->m, fix that here. */
2024 if (TREE_CODE (new_arg) != SSA_NAME
2025 && TREE_CODE (new_arg) != FUNCTION_DECL
2026 && !is_gimple_val (new_arg))
2028 gimple_seq stmts = NULL;
2029 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2030 gsi_insert_seq_on_edge (new_edge, stmts);
2031 inserted = true;
2033 add_phi_arg (new_phi, new_arg, new_edge,
2034 gimple_phi_arg_location_from_edge (phi, old_edge));
2039 /* Commit the delayed edge insertions. */
2040 if (inserted)
2041 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2042 gsi_commit_one_edge_insert (new_edge, NULL);
2046 /* Wrapper for remap_decl so it can be used as a callback. */
2048 static tree
2049 remap_decl_1 (tree decl, void *data)
2051 return remap_decl (decl, (copy_body_data *) data);
2054 /* Build the struct function and associated data structures for the new
2055 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
2057 static void
2058 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2060 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2061 gcov_type count_scale;
2063 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2064 count_scale = (REG_BR_PROB_BASE * count
2065 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2066 else
2067 count_scale = REG_BR_PROB_BASE;
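/* An illustrative sketch with made-up numbers: if the call site is expected
   to execute COUNT == 500 times and the callee's entry block count is 1000,
   then count_scale == REG_BR_PROB_BASE * 500 / 1000, i.e. half of
   REG_BR_PROB_BASE; every block and edge count copied from the callee is
   later multiplied by count_scale / REG_BR_PROB_BASE and thus halved.  */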
2069 /* Register specific tree functions. */
2070 gimple_register_cfg_hooks ();
2072 /* Get clean struct function. */
2073 push_struct_function (new_fndecl);
2075 /* We will rebuild these, so just sanity check that they are empty. */
2076 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2077 gcc_assert (cfun->local_decls == NULL);
2078 gcc_assert (cfun->cfg == NULL);
2079 gcc_assert (cfun->decl == new_fndecl);
2081 /* Copy items we preserve during cloning. */
2082 cfun->static_chain_decl = src_cfun->static_chain_decl;
2083 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2084 cfun->function_end_locus = src_cfun->function_end_locus;
2085 cfun->curr_properties = src_cfun->curr_properties;
2086 cfun->last_verified = src_cfun->last_verified;
2087 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2088 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2089 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2090 cfun->stdarg = src_cfun->stdarg;
2091 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
2092 cfun->after_inlining = src_cfun->after_inlining;
2093 cfun->can_throw_non_call_exceptions
2094 = src_cfun->can_throw_non_call_exceptions;
2095 cfun->returns_struct = src_cfun->returns_struct;
2096 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2097 cfun->after_tree_profile = src_cfun->after_tree_profile;
2099 init_empty_tree_cfg ();
2101 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2102 ENTRY_BLOCK_PTR->count =
2103 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2104 REG_BR_PROB_BASE);
2105 ENTRY_BLOCK_PTR->frequency
2106 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2107 EXIT_BLOCK_PTR->count =
2108 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2109 REG_BR_PROB_BASE);
2110 EXIT_BLOCK_PTR->frequency =
2111 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2112 if (src_cfun->eh)
2113 init_eh_for_function ();
2115 if (src_cfun->gimple_df)
2117 init_tree_ssa (cfun);
2118 cfun->gimple_df->in_ssa_p = true;
2119 init_ssa_operands ();
2121 pop_cfun ();
2124 /* Helper function for copy_cfg_body. Move debug stmts from the end
2125 of NEW_BB to the beginning of successor basic blocks when needed. If the
2126 successor has multiple predecessors, reset them, otherwise keep
2127 their value. */
2129 static void
2130 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2132 edge e;
2133 edge_iterator ei;
2134 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2136 if (gsi_end_p (si)
2137 || gsi_one_before_end_p (si)
2138 || !(stmt_can_throw_internal (gsi_stmt (si))
2139 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2140 return;
2142 FOR_EACH_EDGE (e, ei, new_bb->succs)
2144 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2145 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2146 while (is_gimple_debug (gsi_stmt (ssi)))
2148 gimple stmt = gsi_stmt (ssi), new_stmt;
2149 tree var;
2150 tree value;
2152 /* For the last edge move the debug stmts instead of copying
2153 them. */
2154 if (ei_one_before_end_p (ei))
2156 si = ssi;
2157 gsi_prev (&ssi);
2158 if (!single_pred_p (e->dest))
2159 gimple_debug_bind_reset_value (stmt);
2160 gsi_remove (&si, false);
2161 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2162 continue;
2165 var = gimple_debug_bind_get_var (stmt);
2166 if (single_pred_p (e->dest))
2168 value = gimple_debug_bind_get_value (stmt);
2169 value = unshare_expr (value);
2171 else
2172 value = NULL_TREE;
2173 new_stmt = gimple_build_debug_bind (var, value, stmt);
2174 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2175 VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
2176 gsi_prev (&ssi);
2181 /* Make a copy of the body of FN so that it can be inserted inline in
2182 another function. Walks FN via CFG, returns new fndecl. */
2184 static tree
2185 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2186 basic_block entry_block_map, basic_block exit_block_map,
2187 bitmap blocks_to_copy, basic_block new_entry)
2189 tree callee_fndecl = id->src_fn;
2190 /* Original cfun for the callee, doesn't change. */
2191 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2192 struct function *cfun_to_copy;
2193 basic_block bb;
2194 tree new_fndecl = NULL;
2195 bool need_debug_cleanup = false;
2196 gcov_type count_scale;
2197 int last;
2198 int incoming_frequency = 0;
2199 gcov_type incoming_count = 0;
2201 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2202 count_scale = (REG_BR_PROB_BASE * count
2203 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2204 else
2205 count_scale = REG_BR_PROB_BASE;
2207 /* Register specific tree functions. */
2208 gimple_register_cfg_hooks ();
2210 /* If we are inlining just a region of the function, make sure to connect the
2211 new entry to ENTRY_BLOCK_PTR. Since the new entry can be part of a loop, we
2212 must compute the frequency and probability of ENTRY_BLOCK_PTR based on the
2213 frequencies and probabilities of edges incoming from the nonduplicated region. */
2214 if (new_entry)
2216 edge e;
2217 edge_iterator ei;
2219 FOR_EACH_EDGE (e, ei, new_entry->preds)
2220 if (!e->src->aux)
2222 incoming_frequency += EDGE_FREQUENCY (e);
2223 incoming_count += e->count;
2225 incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
2226 incoming_frequency
2227 = incoming_frequency * frequency_scale / REG_BR_PROB_BASE;
2228 ENTRY_BLOCK_PTR->count = incoming_count;
2229 ENTRY_BLOCK_PTR->frequency = incoming_frequency;
2232 /* Must have a CFG here at this point. */
2233 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2234 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2236 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2238 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2239 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2240 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2241 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2243 /* Duplicate any exception-handling regions. */
2244 if (cfun->eh)
2245 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2246 remap_decl_1, id);
2248 /* Use aux pointers to map the original blocks to copy. */
2249 FOR_EACH_BB_FN (bb, cfun_to_copy)
2250 if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
2252 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2253 bb->aux = new_bb;
2254 new_bb->aux = bb;
2257 last = last_basic_block;
2259 /* Now that we've duplicated the blocks, duplicate their edges. */
2260 FOR_ALL_BB_FN (bb, cfun_to_copy)
2261 if (!blocks_to_copy
2262 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2263 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
2265 if (new_entry)
2267 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2268 e->probability = REG_BR_PROB_BASE;
2269 e->count = incoming_count;
2272 if (gimple_in_ssa_p (cfun))
2273 FOR_ALL_BB_FN (bb, cfun_to_copy)
2274 if (!blocks_to_copy
2275 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2276 copy_phis_for_bb (bb, id);
2278 FOR_ALL_BB_FN (bb, cfun_to_copy)
2279 if (bb->aux)
2281 if (need_debug_cleanup
2282 && bb->index != ENTRY_BLOCK
2283 && bb->index != EXIT_BLOCK)
2284 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2285 ((basic_block)bb->aux)->aux = NULL;
2286 bb->aux = NULL;
2289 /* Zero out AUX fields of blocks newly created during EH edge
2290 insertion. */
2291 for (; last < last_basic_block; last++)
2293 if (need_debug_cleanup)
2294 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2295 BASIC_BLOCK (last)->aux = NULL;
2297 entry_block_map->aux = NULL;
2298 exit_block_map->aux = NULL;
2300 if (id->eh_map)
2302 pointer_map_destroy (id->eh_map);
2303 id->eh_map = NULL;
2306 return new_fndecl;
2309 /* Copy the debug STMT using ID. We deal with these statements in a
2310 special way: if any variable in their VALUE expression wasn't
2311 remapped yet, we won't remap it, because that would get decl uids
2312 out of sync, causing codegen differences between -g and -g0. If
2313 this arises, we drop the VALUE expression altogether. */
2315 static void
2316 copy_debug_stmt (gimple stmt, copy_body_data *id)
2318 tree t, *n;
2319 struct walk_stmt_info wi;
2321 t = id->block;
2322 if (gimple_block (stmt))
2324 tree *n;
2325 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2326 if (n)
2327 t = *n;
2329 gimple_set_block (stmt, t);
2331 /* Remap all the operands in COPY. */
2332 memset (&wi, 0, sizeof (wi));
2333 wi.info = id;
2335 processing_debug_stmt = 1;
2337 t = gimple_debug_bind_get_var (stmt);
2339 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2340 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2342 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2343 t = *n;
2345 else if (TREE_CODE (t) == VAR_DECL
2346 && !TREE_STATIC (t)
2347 && gimple_in_ssa_p (cfun)
2348 && !pointer_map_contains (id->decl_map, t)
2349 && !var_ann (t))
2350 /* T is a non-localized variable. */;
2351 else
2352 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2354 gimple_debug_bind_set_var (stmt, t);
2356 if (gimple_debug_bind_has_value_p (stmt))
2357 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2358 remap_gimple_op_r, &wi, NULL);
2360 /* Punt if any decl couldn't be remapped. */
2361 if (processing_debug_stmt < 0)
2362 gimple_debug_bind_reset_value (stmt);
2364 processing_debug_stmt = 0;
2366 update_stmt (stmt);
2367 if (gimple_in_ssa_p (cfun))
2368 mark_symbols_for_renaming (stmt);
2371 /* Process deferred debug stmts. In order to give values better odds
2372 of being successfully remapped, we delay the processing of debug
2373 stmts until all other stmts that might require remapping are
2374 processed. */
2376 static void
2377 copy_debug_stmts (copy_body_data *id)
2379 size_t i;
2380 gimple stmt;
2382 if (!id->debug_stmts)
2383 return;
2385 FOR_EACH_VEC_ELT (gimple, id->debug_stmts, i, stmt)
2386 copy_debug_stmt (stmt, id);
2388 VEC_free (gimple, heap, id->debug_stmts);
2391 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2392 another function. */
2394 static tree
2395 copy_tree_body (copy_body_data *id)
2397 tree fndecl = id->src_fn;
2398 tree body = DECL_SAVED_TREE (fndecl);
2400 walk_tree (&body, copy_tree_body_r, id, NULL);
2402 return body;
2405 /* Make a copy of the body of FN so that it can be inserted inline in
2406 another function. */
2408 static tree
2409 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2410 basic_block entry_block_map, basic_block exit_block_map,
2411 bitmap blocks_to_copy, basic_block new_entry)
2413 tree fndecl = id->src_fn;
2414 tree body;
2416 /* If this body has a CFG, walk CFG and copy. */
2417 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2418 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2419 blocks_to_copy, new_entry);
2420 copy_debug_stmts (id);
2422 return body;
2425 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2426 defined in function FN, or of a data member thereof. */
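/* A hypothetical example of what this guards against: when recursively
   inlining FN into itself, an argument such as &local, where "local" is an
   automatic variable of FN, must not be blindly substituted into the inlined
   body; setup_one_parameter uses this predicate to detect that case.  */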
2428 static bool
2429 self_inlining_addr_expr (tree value, tree fn)
2431 tree var;
2433 if (TREE_CODE (value) != ADDR_EXPR)
2434 return false;
2436 var = get_base_address (TREE_OPERAND (value, 0));
2438 return var && auto_var_in_fn_p (var, fn);
2441 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2442 lexical block and line number information from base_stmt, if given,
2443 or from the last stmt of the block otherwise. */
2445 static gimple
2446 insert_init_debug_bind (copy_body_data *id,
2447 basic_block bb, tree var, tree value,
2448 gimple base_stmt)
2450 gimple note;
2451 gimple_stmt_iterator gsi;
2452 tree tracked_var;
2454 if (!gimple_in_ssa_p (id->src_cfun))
2455 return NULL;
2457 if (!MAY_HAVE_DEBUG_STMTS)
2458 return NULL;
2460 tracked_var = target_for_debug_bind (var);
2461 if (!tracked_var)
2462 return NULL;
2464 if (bb)
2466 gsi = gsi_last_bb (bb);
2467 if (!base_stmt && !gsi_end_p (gsi))
2468 base_stmt = gsi_stmt (gsi);
2471 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2473 if (bb)
2475 if (!gsi_end_p (gsi))
2476 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2477 else
2478 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2481 return note;
2484 static void
2485 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2487 /* If VAR represents a zero-sized variable, it's possible that the
2488 assignment statement may result in no gimple statements. */
2489 if (init_stmt)
2491 gimple_stmt_iterator si = gsi_last_bb (bb);
2493 /* We can end up with init statements that store to a non-register
2494 from a rhs with a conversion. Handle that here by forcing the
2495 rhs into a temporary. gimple_regimplify_operands is not
2496 prepared to do this for us. */
2497 if (!is_gimple_debug (init_stmt)
2498 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2499 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2500 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2502 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2503 gimple_expr_type (init_stmt),
2504 gimple_assign_rhs1 (init_stmt));
2505 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2506 GSI_NEW_STMT);
2507 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2508 gimple_assign_set_rhs1 (init_stmt, rhs);
2510 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2511 gimple_regimplify_operands (init_stmt, &si);
2512 mark_symbols_for_renaming (init_stmt);
2514 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2516 tree var, def = gimple_assign_lhs (init_stmt);
2518 if (TREE_CODE (def) == SSA_NAME)
2519 var = SSA_NAME_VAR (def);
2520 else
2521 var = def;
2523 insert_init_debug_bind (id, bb, var, def, init_stmt);
2528 /* Initialize parameter P with VALUE. If needed, produce an init statement
2529 at the end of BB. When BB is NULL, we return the init statement to be
2530 output later. */
2531 static gimple
2532 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2533 basic_block bb, tree *vars)
2535 gimple init_stmt = NULL;
2536 tree var;
2537 tree rhs = value;
2538 tree def = (gimple_in_ssa_p (cfun)
2539 ? gimple_default_def (id->src_cfun, p) : NULL);
2541 if (value
2542 && value != error_mark_node
2543 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2545 if (fold_convertible_p (TREE_TYPE (p), value))
2546 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2547 else
2548 /* ??? For valid (GIMPLE) programs we should not end up here.
2549 Still if something has gone wrong and we end up with truly
2550 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2551 to not leak invalid GIMPLE to the following passes. */
2552 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2555 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2556 here since the type of this decl must be visible to the calling
2557 function. */
2558 var = copy_decl_to_var (p, id);
2560 /* We're actually using the newly-created var. */
2561 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2563 get_var_ann (var);
2564 add_referenced_var (var);
2567 /* Declare this new variable. */
2568 DECL_CHAIN (var) = *vars;
2569 *vars = var;
2571 /* Make gimplifier happy about this variable. */
2572 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2574 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2575 we would not need to create a new variable here at all, if it
2576 weren't for debug info. Still, we can just use the argument
2577 value. */
2578 if (TREE_READONLY (p)
2579 && !TREE_ADDRESSABLE (p)
2580 && value && !TREE_SIDE_EFFECTS (value)
2581 && !def)
2583 /* We may produce non-gimple trees by adding NOPs or introduce
2584 invalid sharing when the operand is not really constant.
2585 It is not a big deal to prohibit constant propagation here as
2586 we will constant propagate in DOM1 pass anyway. */
2587 if (is_gimple_min_invariant (value)
2588 && useless_type_conversion_p (TREE_TYPE (p),
2589 TREE_TYPE (value))
2590 /* We have to be very careful about ADDR_EXPR. Make sure
2591 the base variable isn't a local variable of the inlined
2592 function, e.g., when doing recursive inlining, direct or
2593 mutually-recursive or whatever, which is why we don't
2594 just test whether fn == current_function_decl. */
2595 && ! self_inlining_addr_expr (value, fn))
2597 insert_decl_map (id, p, value);
2598 insert_debug_decl_map (id, p, var);
2599 return insert_init_debug_bind (id, bb, var, value, NULL);
2603 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2604 that way, when the PARM_DECL is encountered, it will be
2605 automatically replaced by the VAR_DECL. */
2606 insert_decl_map (id, p, var);
2608 /* Even if P was TREE_READONLY, the new VAR should not be.
2609 In the original code, we would have constructed a
2610 temporary, and then the function body would have never
2611 changed the value of P. However, now, we will be
2612 constructing VAR directly. The constructor body may
2613 change its value multiple times as it is being
2614 constructed. Therefore, it must not be TREE_READONLY;
2615 the back-end assumes that a TREE_READONLY variable is
2616 assigned to only once. */
2617 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2618 TREE_READONLY (var) = 0;
2620 /* If there is no setup required and we are in SSA, take the easy route
2621 replacing all SSA names representing the function parameter by the
2622 SSA name passed to the function.
2624 We need to construct a map for the variable anyway as it might be used
2625 in different SSA names when the parameter is set in the function.
2627 Do the replacement at -O0 for const arguments replaced by a constant.
2628 This is important for builtin_constant_p and other constructs requiring
2629 a constant argument to be visible in the inlined function body. */
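/* A rough sketch of the easy route, with hypothetical SSA names: for a call
   foo (x_5), where foo's parameter P has the default definition p_1(D), we
   simply record the mapping p_1(D) -> x_5 instead of emitting an
   initialization statement, so every use of p_1(D) in the copied body
   becomes a use of x_5.  */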
2630 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2631 && (optimize
2632 || (TREE_READONLY (p)
2633 && is_gimple_min_invariant (rhs)))
2634 && (TREE_CODE (rhs) == SSA_NAME
2635 || is_gimple_min_invariant (rhs))
2636 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2638 insert_decl_map (id, def, rhs);
2639 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2642 /* If the value of the argument is never used, don't bother initializing
2643 it. */
2644 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2646 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2647 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2650 /* Initialize this VAR_DECL from the equivalent argument. Convert
2651 the argument to the proper type in case it was promoted. */
2652 if (value)
2654 if (rhs == error_mark_node)
2656 insert_decl_map (id, p, var);
2657 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2660 STRIP_USELESS_TYPE_CONVERSION (rhs);
2662 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2663 keep our trees in gimple form. */
2664 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2666 def = remap_ssa_name (def, id);
2667 init_stmt = gimple_build_assign (def, rhs);
2668 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2669 set_default_def (var, NULL);
2671 else
2672 init_stmt = gimple_build_assign (var, rhs);
2674 if (bb && init_stmt)
2675 insert_init_stmt (id, bb, init_stmt);
2677 return init_stmt;
2680 /* Generate code to initialize the parameters of the function at the
2681 top of the stack in ID from the GIMPLE_CALL STMT. */
2683 static void
2684 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2685 tree fn, basic_block bb)
2687 tree parms;
2688 size_t i;
2689 tree p;
2690 tree vars = NULL_TREE;
2691 tree static_chain = gimple_call_chain (stmt);
2693 /* Figure out what the parameters are. */
2694 parms = DECL_ARGUMENTS (fn);
2696 /* Loop through the parameter declarations, replacing each with an
2697 equivalent VAR_DECL, appropriately initialized. */
2698 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2700 tree val;
2701 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2702 setup_one_parameter (id, p, val, fn, bb, &vars);
2704 /* After remapping parameters remap their types. This has to be done
2705 in a second loop over all parameters to appropriately remap
2706 variable sized arrays when the size is specified in a
2707 parameter following the array. */
2708 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2710 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2711 if (varp
2712 && TREE_CODE (*varp) == VAR_DECL)
2714 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
2715 ? gimple_default_def (id->src_cfun, p) : NULL);
2716 tree var = *varp;
2717 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
2718 /* Also remap the default definition if it was remapped
2719 to the default definition of the parameter replacement
2720 by the parameter setup. */
2721 if (def)
2723 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2724 if (defp
2725 && TREE_CODE (*defp) == SSA_NAME
2726 && SSA_NAME_VAR (*defp) == var)
2727 TREE_TYPE (*defp) = TREE_TYPE (var);
2732 /* Initialize the static chain. */
2733 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2734 gcc_assert (fn != current_function_decl);
2735 if (p)
2737 /* No static chain? Seems like a bug in tree-nested.c. */
2738 gcc_assert (static_chain);
2740 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2743 declare_inline_vars (id->block, vars);
2747 /* Declare a return variable to replace the RESULT_DECL for the
2748 function we are calling. An appropriate DECL_STMT is returned.
2749 The USE_STMT is filled to contain a use of the declaration to
2750 indicate the return value of the function.
2752 RETURN_SLOT, if non-null, is the place where to store the result. It
2753 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2754 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2756 The return value is a (possibly null) value that holds the result
2757 as seen by the caller. */
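/* A sketch with hypothetical names: for the call d = foo (), MODIFY_DEST is
   d; when d is a local, non-addressable variable whose type matches the
   callee's return type, it may be reused directly as the return variable,
   avoiding an extra temporary.  */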
2759 static tree
2760 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2761 basic_block entry_bb)
2763 tree callee = id->src_fn;
2764 tree caller = id->dst_fn;
2765 tree result = DECL_RESULT (callee);
2766 tree callee_type = TREE_TYPE (result);
2767 tree caller_type;
2768 tree var, use;
2770 /* Handle type-mismatches in the function declaration return type
2771 vs. the call expression. */
2772 if (modify_dest)
2773 caller_type = TREE_TYPE (modify_dest);
2774 else
2775 caller_type = TREE_TYPE (TREE_TYPE (callee));
2777 /* We don't need to do anything for functions that don't return
2778 anything. */
2779 if (!result || VOID_TYPE_P (callee_type))
2780 return NULL_TREE;
2782 /* If there was a return slot, then the return value is the
2783 dereferenced address of that object. */
2784 if (return_slot)
2786 /* The front end shouldn't have used both return_slot and
2787 a modify expression. */
2788 gcc_assert (!modify_dest);
2789 if (DECL_BY_REFERENCE (result))
2791 tree return_slot_addr = build_fold_addr_expr (return_slot);
2792 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2794 /* We are going to construct *&return_slot and we can't do that
2795 for variables believed not to be addressable.
2797 FIXME: This check can possibly match, because values returned
2798 via return slot optimization are not believed to have their address
2799 taken by alias analysis. */
2800 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2801 var = return_slot_addr;
2803 else
2805 var = return_slot;
2806 gcc_assert (TREE_CODE (var) != SSA_NAME);
2807 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2809 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2810 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2811 && !DECL_GIMPLE_REG_P (result)
2812 && DECL_P (var))
2813 DECL_GIMPLE_REG_P (var) = 0;
2814 use = NULL;
2815 goto done;
2818 /* All types requiring non-trivial constructors should have been handled. */
2819 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2821 /* Attempt to avoid creating a new temporary variable. */
2822 if (modify_dest
2823 && TREE_CODE (modify_dest) != SSA_NAME)
2825 bool use_it = false;
2827 /* We can't use MODIFY_DEST if there's type promotion involved. */
2828 if (!useless_type_conversion_p (callee_type, caller_type))
2829 use_it = false;
2831 /* ??? If we're assigning to a variable sized type, then we must
2832 reuse the destination variable, because we've no good way to
2833 create variable sized temporaries at this point. */
2834 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2835 use_it = true;
2837 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2838 reuse it as the result of the call directly. Don't do this if
2839 it would promote MODIFY_DEST to addressable. */
2840 else if (TREE_ADDRESSABLE (result))
2841 use_it = false;
2842 else
2844 tree base_m = get_base_address (modify_dest);
2846 /* If the base isn't a decl, then it's a pointer, and we don't
2847 know where that's going to go. */
2848 if (!DECL_P (base_m))
2849 use_it = false;
2850 else if (is_global_var (base_m))
2851 use_it = false;
2852 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2853 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2854 && !DECL_GIMPLE_REG_P (result)
2855 && DECL_GIMPLE_REG_P (base_m))
2856 use_it = false;
2857 else if (!TREE_ADDRESSABLE (base_m))
2858 use_it = true;
2861 if (use_it)
2863 var = modify_dest;
2864 use = NULL;
2865 goto done;
2869 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2871 var = copy_result_decl_to_var (result, id);
2872 if (gimple_in_ssa_p (cfun))
2874 get_var_ann (var);
2875 add_referenced_var (var);
2878 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2879 add_local_decl (DECL_STRUCT_FUNCTION (caller), var);
2881 /* Do not have the rest of GCC warn about this variable as it should
2882 not be visible to the user. */
2883 TREE_NO_WARNING (var) = 1;
2885 declare_inline_vars (id->block, var);
2887 /* Build the use expr. If the return type of the function was
2888 promoted, convert it back to the expected type. */
2889 use = var;
2890 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2891 use = fold_convert (caller_type, var);
2893 STRIP_USELESS_TYPE_CONVERSION (use);
2895 if (DECL_BY_REFERENCE (result))
2897 TREE_ADDRESSABLE (var) = 1;
2898 var = build_fold_addr_expr (var);
2901 done:
2902 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2903 way, when the RESULT_DECL is encountered, it will be
2904 automatically replaced by the VAR_DECL.
2906 When returning by reference, ensure that RESULT_DECL remaps to
2907 gimple_val. */
2908 if (DECL_BY_REFERENCE (result)
2909 && !is_gimple_val (var))
2911 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
2912 if (gimple_in_ssa_p (id->src_cfun))
2914 get_var_ann (temp);
2915 add_referenced_var (temp);
2917 insert_decl_map (id, result, temp);
2918 /* When RESULT_DECL is in SSA form, we need to use its default_def
2919 SSA_NAME. */
2920 if (gimple_in_ssa_p (id->src_cfun) && gimple_default_def (id->src_cfun, result))
2921 temp = remap_ssa_name (gimple_default_def (id->src_cfun, result), id);
2922 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
2924 else
2925 insert_decl_map (id, result, var);
2927 /* Remember this so we can ignore it in remap_decls. */
2928 id->retvar = var;
2930 return use;
2933 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2934 to a local label. */
2936 static tree
2937 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2939 tree node = *nodep;
2940 tree fn = (tree) fnp;
2942 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2943 return node;
2945 if (TYPE_P (node))
2946 *walk_subtrees = 0;
2948 return NULL_TREE;
2951 /* Determine if the function can be copied. If so return NULL. If
2952 not return a string describing the reason for failure. */
2954 static const char *
2955 copy_forbidden (struct function *fun, tree fndecl)
2957 const char *reason = fun->cannot_be_copied_reason;
2958 tree decl;
2959 unsigned ix;
2961 /* Only examine the function once. */
2962 if (fun->cannot_be_copied_set)
2963 return reason;
2965 /* We cannot copy a function that receives a non-local goto
2966 because we cannot remap the destination label used in the
2967 function that is performing the non-local goto. */
2968 /* ??? Actually, this should be possible, if we work at it.
2969 No doubt there's just a handful of places that simply
2970 assume it doesn't happen and don't substitute properly. */
2971 if (fun->has_nonlocal_label)
2973 reason = G_("function %q+F can never be copied "
2974 "because it receives a non-local goto");
2975 goto fail;
2978 FOR_EACH_LOCAL_DECL (fun, ix, decl)
2979 if (TREE_CODE (decl) == VAR_DECL
2980 && TREE_STATIC (decl)
2981 && !DECL_EXTERNAL (decl)
2982 && DECL_INITIAL (decl)
2983 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2984 has_label_address_in_static_1,
2985 fndecl))
2987 reason = G_("function %q+F can never be copied because it saves "
2988 "address of local label in a static variable");
2989 goto fail;
2992 fail:
2993 fun->cannot_be_copied_reason = reason;
2994 fun->cannot_be_copied_set = true;
2995 return reason;
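/* A minimal hypothetical example of the second reason above:

     void f (int i)
     {
       static const void *tbl[] = { &&l1, &&l2 };
       goto *tbl[i];
     l1: return;
     l2: return;
     }

   The addresses of the local labels l1/l2 are saved in the static tbl, so a
   copy of f would end up jumping into the original body.  */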
2999 static const char *inline_forbidden_reason;
3001 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3002 iff a function cannot be inlined. Also sets the reason why. */
3004 static tree
3005 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3006 struct walk_stmt_info *wip)
3008 tree fn = (tree) wip->info;
3009 tree t;
3010 gimple stmt = gsi_stmt (*gsi);
3012 switch (gimple_code (stmt))
3014 case GIMPLE_CALL:
3015 /* Refuse to inline an alloca call unless the user explicitly forced it, as
3016 this may change the program's memory overhead drastically when the
3017 function using alloca is called in a loop. In the GCC present in
3018 SPEC2000, inlining into schedule_block caused it to require 2GB of
3019 RAM instead of 256MB. */
3020 if (gimple_alloca_call_p (stmt)
3021 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3023 inline_forbidden_reason
3024 = G_("function %q+F can never be inlined because it uses "
3025 "alloca (override using the always_inline attribute)");
3026 *handled_ops_p = true;
3027 return fn;
3030 t = gimple_call_fndecl (stmt);
3031 if (t == NULL_TREE)
3032 break;
3034 /* We cannot inline functions that call setjmp. */
3035 if (setjmp_call_p (t))
3037 inline_forbidden_reason
3038 = G_("function %q+F can never be inlined because it uses setjmp");
3039 *handled_ops_p = true;
3040 return t;
3043 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3044 switch (DECL_FUNCTION_CODE (t))
3046 /* We cannot inline functions that take a variable number of
3047 arguments. */
3048 case BUILT_IN_VA_START:
3049 case BUILT_IN_NEXT_ARG:
3050 case BUILT_IN_VA_END:
3051 inline_forbidden_reason
3052 = G_("function %q+F can never be inlined because it "
3053 "uses variable argument lists");
3054 *handled_ops_p = true;
3055 return t;
3057 case BUILT_IN_LONGJMP:
3058 /* We can't inline functions that call __builtin_longjmp at
3059 all. The non-local goto machinery really requires the
3060 destination be in a different function. If we allow the
3061 function calling __builtin_longjmp to be inlined into the
3062 function calling __builtin_setjmp, Things will Go Awry. */
3063 inline_forbidden_reason
3064 = G_("function %q+F can never be inlined because "
3065 "it uses setjmp-longjmp exception handling");
3066 *handled_ops_p = true;
3067 return t;
3069 case BUILT_IN_NONLOCAL_GOTO:
3070 /* Similarly. */
3071 inline_forbidden_reason
3072 = G_("function %q+F can never be inlined because "
3073 "it uses non-local goto");
3074 *handled_ops_p = true;
3075 return t;
3077 case BUILT_IN_RETURN:
3078 case BUILT_IN_APPLY_ARGS:
3079 /* If a __builtin_apply_args caller would be inlined,
3080 it would be saving arguments of the function it has
3081 been inlined into. Similarly __builtin_return would
3082 return from the function into which it has been inlined. */
3083 inline_forbidden_reason
3084 = G_("function %q+F can never be inlined because "
3085 "it uses __builtin_return or __builtin_apply_args");
3086 *handled_ops_p = true;
3087 return t;
3089 default:
3090 break;
3092 break;
3094 case GIMPLE_GOTO:
3095 t = gimple_goto_dest (stmt);
3097 /* We will not inline a function which uses computed goto. The
3098 addresses of its local labels, which may be tucked into
3099 global storage, are of course not constant across
3100 instantiations, which causes unexpected behavior. */
3101 if (TREE_CODE (t) != LABEL_DECL)
3103 inline_forbidden_reason
3104 = G_("function %q+F can never be inlined "
3105 "because it contains a computed goto");
3106 *handled_ops_p = true;
3107 return t;
3109 break;
3111 default:
3112 break;
3115 *handled_ops_p = false;
3116 return NULL_TREE;
3119 /* Return true if FNDECL is a function that cannot be inlined into
3120 another one. */
3122 static bool
3123 inline_forbidden_p (tree fndecl)
3125 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3126 struct walk_stmt_info wi;
3127 struct pointer_set_t *visited_nodes;
3128 basic_block bb;
3129 bool forbidden_p = false;
3131 /* First check for shared reasons not to copy the code. */
3132 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3133 if (inline_forbidden_reason != NULL)
3134 return true;
3136 /* Next, walk the statements of the function looking for
3137 constructs we can't handle, or that are non-optimal for inlining. */
3138 visited_nodes = pointer_set_create ();
3139 memset (&wi, 0, sizeof (wi));
3140 wi.info = (void *) fndecl;
3141 wi.pset = visited_nodes;
3143 FOR_EACH_BB_FN (bb, fun)
3145 gimple ret;
3146 gimple_seq seq = bb_seq (bb);
3147 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3148 forbidden_p = (ret != NULL);
3149 if (forbidden_p)
3150 break;
3153 pointer_set_destroy (visited_nodes);
3154 return forbidden_p;
3157 /* Return true if CALLEE cannot be inlined into CALLER. */
3159 static bool
3160 inline_forbidden_into_p (tree caller, tree callee)
3162 /* Don't inline if the functions have different EH personalities. */
3163 if (DECL_FUNCTION_PERSONALITY (caller)
3164 && DECL_FUNCTION_PERSONALITY (callee)
3165 && (DECL_FUNCTION_PERSONALITY (caller)
3166 != DECL_FUNCTION_PERSONALITY (callee)))
3167 return true;
3169 /* Don't inline if the callee can throw non-call exceptions but the
3170 caller cannot. */
3171 if (DECL_STRUCT_FUNCTION (callee)
3172 && DECL_STRUCT_FUNCTION (callee)->can_throw_non_call_exceptions
3173 && !(DECL_STRUCT_FUNCTION (caller)
3174 && DECL_STRUCT_FUNCTION (caller)->can_throw_non_call_exceptions))
3175 return true;
3177 return false;
3180 /* Returns nonzero if FN is a function that does not have any
3181 fundamental inline blocking properties. */
3183 bool
3184 tree_inlinable_function_p (tree fn)
3186 bool inlinable = true;
3187 bool do_warning;
3188 tree always_inline;
3190 /* If we've already decided this function shouldn't be inlined,
3191 there's no need to check again. */
3192 if (DECL_UNINLINABLE (fn))
3193 return false;
3195 /* We only warn for functions declared `inline' by the user. */
3196 do_warning = (warn_inline
3197 && DECL_DECLARED_INLINE_P (fn)
3198 && !DECL_NO_INLINE_WARNING_P (fn)
3199 && !DECL_IN_SYSTEM_HEADER (fn));
3201 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3203 if (flag_no_inline
3204 && always_inline == NULL)
3206 if (do_warning)
3207 warning (OPT_Winline, "function %q+F can never be inlined because it "
3208 "is suppressed using -fno-inline", fn);
3209 inlinable = false;
3212 else if (!function_attribute_inlinable_p (fn))
3214 if (do_warning)
3215 warning (OPT_Winline, "function %q+F can never be inlined because it "
3216 "uses attributes conflicting with inlining", fn);
3217 inlinable = false;
3220 else if (inline_forbidden_p (fn))
3222 /* See if we should warn about uninlinable functions. Previously,
3223 some of these warnings would be issued while trying to expand
3224 the function inline, but that would cause multiple warnings
3225 about functions that would for example call alloca. But since
3226 this is a property of the function, just one warning is enough.
3227 As a bonus we can now give more details about the reason why a
3228 function is not inlinable. */
3229 if (always_inline)
3230 sorry (inline_forbidden_reason, fn);
3231 else if (do_warning)
3232 warning (OPT_Winline, inline_forbidden_reason, fn);
3234 inlinable = false;
3237 /* Squirrel away the result so that we don't have to check again. */
3238 DECL_UNINLINABLE (fn) = !inlinable;
3240 return inlinable;
3243 /* Estimate the cost of a memory move. Use the machine-dependent
3244 word size and take a possible memcpy call into account. */
3247 estimate_move_cost (tree type)
3249 HOST_WIDE_INT size;
3251 gcc_assert (!VOID_TYPE_P (type));
3253 size = int_size_in_bytes (type);
3255 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3256 /* Cost of a memcpy call, 3 arguments and the call. */
3257 return 4;
3258 else
3259 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
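/* A worked example, purely illustrative since MOVE_MAX_PIECES and MOVE_RATIO
   are target-dependent: with MOVE_MAX_PIECES == 8 and MOVE_RATIO == 6, a
   12-byte struct costs (12 + 8 - 1) / 8 == 2, while a 64-byte struct exceeds
   8 * 6 == 48 bytes and is charged the flat memcpy cost of 4.  */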
3262 /* Returns the cost of operation CODE, according to WEIGHTS. */
3264 static int
3265 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3266 tree op1 ATTRIBUTE_UNUSED, tree op2)
3268 switch (code)
3270 /* These are "free" conversions, or their presumed cost
3271 is folded into other operations. */
3272 case RANGE_EXPR:
3273 CASE_CONVERT:
3274 case COMPLEX_EXPR:
3275 case PAREN_EXPR:
3276 return 0;
3278 /* Assign cost of 1 to usual operations.
3279 ??? We may consider mapping RTL costs to this. */
3280 case COND_EXPR:
3281 case VEC_COND_EXPR:
3283 case PLUS_EXPR:
3284 case POINTER_PLUS_EXPR:
3285 case MINUS_EXPR:
3286 case MULT_EXPR:
3288 case ADDR_SPACE_CONVERT_EXPR:
3289 case FIXED_CONVERT_EXPR:
3290 case FIX_TRUNC_EXPR:
3292 case NEGATE_EXPR:
3293 case FLOAT_EXPR:
3294 case MIN_EXPR:
3295 case MAX_EXPR:
3296 case ABS_EXPR:
3298 case LSHIFT_EXPR:
3299 case RSHIFT_EXPR:
3300 case LROTATE_EXPR:
3301 case RROTATE_EXPR:
3302 case VEC_LSHIFT_EXPR:
3303 case VEC_RSHIFT_EXPR:
3305 case BIT_IOR_EXPR:
3306 case BIT_XOR_EXPR:
3307 case BIT_AND_EXPR:
3308 case BIT_NOT_EXPR:
3310 case TRUTH_ANDIF_EXPR:
3311 case TRUTH_ORIF_EXPR:
3312 case TRUTH_AND_EXPR:
3313 case TRUTH_OR_EXPR:
3314 case TRUTH_XOR_EXPR:
3315 case TRUTH_NOT_EXPR:
3317 case LT_EXPR:
3318 case LE_EXPR:
3319 case GT_EXPR:
3320 case GE_EXPR:
3321 case EQ_EXPR:
3322 case NE_EXPR:
3323 case ORDERED_EXPR:
3324 case UNORDERED_EXPR:
3326 case UNLT_EXPR:
3327 case UNLE_EXPR:
3328 case UNGT_EXPR:
3329 case UNGE_EXPR:
3330 case UNEQ_EXPR:
3331 case LTGT_EXPR:
3333 case CONJ_EXPR:
3335 case PREDECREMENT_EXPR:
3336 case PREINCREMENT_EXPR:
3337 case POSTDECREMENT_EXPR:
3338 case POSTINCREMENT_EXPR:
3340 case REALIGN_LOAD_EXPR:
3342 case REDUC_MAX_EXPR:
3343 case REDUC_MIN_EXPR:
3344 case REDUC_PLUS_EXPR:
3345 case WIDEN_SUM_EXPR:
3346 case WIDEN_MULT_EXPR:
3347 case DOT_PROD_EXPR:
3348 case WIDEN_MULT_PLUS_EXPR:
3349 case WIDEN_MULT_MINUS_EXPR:
3351 case VEC_WIDEN_MULT_HI_EXPR:
3352 case VEC_WIDEN_MULT_LO_EXPR:
3353 case VEC_UNPACK_HI_EXPR:
3354 case VEC_UNPACK_LO_EXPR:
3355 case VEC_UNPACK_FLOAT_HI_EXPR:
3356 case VEC_UNPACK_FLOAT_LO_EXPR:
3357 case VEC_PACK_TRUNC_EXPR:
3358 case VEC_PACK_SAT_EXPR:
3359 case VEC_PACK_FIX_TRUNC_EXPR:
3360 case VEC_EXTRACT_EVEN_EXPR:
3361 case VEC_EXTRACT_ODD_EXPR:
3362 case VEC_INTERLEAVE_HIGH_EXPR:
3363 case VEC_INTERLEAVE_LOW_EXPR:
3365 return 1;
3367 /* A few special cases of expensive operations. This is useful
3368 to avoid inlining functions having too many of these. */
3369 case TRUNC_DIV_EXPR:
3370 case CEIL_DIV_EXPR:
3371 case FLOOR_DIV_EXPR:
3372 case ROUND_DIV_EXPR:
3373 case EXACT_DIV_EXPR:
3374 case TRUNC_MOD_EXPR:
3375 case CEIL_MOD_EXPR:
3376 case FLOOR_MOD_EXPR:
3377 case ROUND_MOD_EXPR:
3378 case RDIV_EXPR:
3379 if (TREE_CODE (op2) != INTEGER_CST)
3380 return weights->div_mod_cost;
3381 return 1;
3383 default:
3384 /* We expect a copy assignment with no operator. */
3385 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3386 return 0;
3391 /* Estimate number of instructions that will be created by expanding
3392 the statements in the statement sequence STMTS.
3393 WEIGHTS contains weights attributed to various constructs. */
3395 static
3396 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3398 int cost;
3399 gimple_stmt_iterator gsi;
3401 cost = 0;
3402 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3403 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3405 return cost;
3409 /* Estimate number of instructions that will be created by expanding STMT.
3410 WEIGHTS contains weights attributed to various constructs. */
3413 estimate_num_insns (gimple stmt, eni_weights *weights)
3415 unsigned cost, i;
3416 enum gimple_code code = gimple_code (stmt);
3417 tree lhs;
3418 tree rhs;
3420 switch (code)
3422 case GIMPLE_ASSIGN:
3423 /* Try to estimate the cost of assignments. We have three cases to
3424 deal with:
3425 1) Simple assignments to registers;
3426 2) Stores to things that must live in memory. This includes
3427 "normal" stores to scalars, but also assignments of large
3428 structures, or constructors of big arrays;
3430 Let us look at the first two cases, assuming we have "a = b + C":
3431 <GIMPLE_ASSIGN <var_decl "a">
3432 <plus_expr <var_decl "b"> <constant C>>
3433 If "a" is a GIMPLE register, the assignment to it is free on almost
3434 any target, because "a" usually ends up in a real register. Hence
3435 the only cost of this expression comes from the PLUS_EXPR, and we
3436 can ignore the GIMPLE_ASSIGN.
3437 If "a" is not a GIMPLE register, the assignment to "a" will most
3438 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3439 of moving something into "a", which we compute using the function
3440 estimate_move_cost. */
3441 lhs = gimple_assign_lhs (stmt);
3442 rhs = gimple_assign_rhs1 (stmt);
3444 if (is_gimple_reg (lhs))
3445 cost = 0;
3446 else
3447 cost = estimate_move_cost (TREE_TYPE (lhs));
3449 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3450 cost += estimate_move_cost (TREE_TYPE (rhs));
3452 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3453 gimple_assign_rhs1 (stmt),
3454 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3455 == GIMPLE_BINARY_RHS
3456 ? gimple_assign_rhs2 (stmt) : NULL);
3457 break;
3459 case GIMPLE_COND:
3460 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3461 gimple_op (stmt, 0),
3462 gimple_op (stmt, 1));
3463 break;
3465 case GIMPLE_SWITCH:
3466 /* Take into account cost of the switch + guess 2 conditional jumps for
3467 each case label.
3469 TODO: once the switch expansion logic is sufficiently separated, we can
3470 do a better job of estimating the cost of the switch. */
3471 if (weights->time_based)
3472 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3473 else
3474 cost = gimple_switch_num_labels (stmt) * 2;
3475 break;
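/* A rough illustration: a switch with 16 case labels is estimated at
   floor_log2 (16) * 2 == 8 when measuring time (guessing a balanced decision
   tree) but at 16 * 2 == 32 when measuring size.  */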
3477 case GIMPLE_CALL:
3479 tree decl = gimple_call_fndecl (stmt);
3480 tree addr = gimple_call_fn (stmt);
3481 tree funtype = TREE_TYPE (addr);
3482 bool stdarg = false;
3484 if (POINTER_TYPE_P (funtype))
3485 funtype = TREE_TYPE (funtype);
3487 if (is_simple_builtin (decl))
3488 return 0;
3489 else if (is_inexpensive_builtin (decl))
3490 cost = weights->target_builtin_call_cost;
3491 else
3492 cost = weights->call_cost;
3494 if (decl)
3495 funtype = TREE_TYPE (decl);
3497 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3498 cost += estimate_move_cost (TREE_TYPE (funtype));
3500 if (funtype)
3501 stdarg = stdarg_p (funtype);
3503 /* Our cost must be kept in sync with
3504 cgraph_estimate_size_after_inlining, which uses the function
3505 declaration to figure out the arguments.
3507 For functions taking a variable list of arguments we must
3508 look into the call statement itself. This is safe because
3509 we will get only higher costs and in most cases we will
3510 not inline these anyway. */
3511 if (decl && DECL_ARGUMENTS (decl) && !stdarg)
3513 tree arg;
3514 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
3515 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3516 cost += estimate_move_cost (TREE_TYPE (arg));
3518 else if (funtype && prototype_p (funtype) && !stdarg)
3520 tree t;
3521 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3522 t = TREE_CHAIN (t))
3523 if (!VOID_TYPE_P (TREE_VALUE (t)))
3524 cost += estimate_move_cost (TREE_VALUE (t));
3526 else
3528 for (i = 0; i < gimple_call_num_args (stmt); i++)
3530 tree arg = gimple_call_arg (stmt, i);
3531 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3532 cost += estimate_move_cost (TREE_TYPE (arg));
3536 break;
3539 case GIMPLE_GOTO:
3540 case GIMPLE_LABEL:
3541 case GIMPLE_NOP:
3542 case GIMPLE_PHI:
3543 case GIMPLE_RETURN:
3544 case GIMPLE_PREDICT:
3545 case GIMPLE_DEBUG:
3546 return 0;
3548 case GIMPLE_ASM:
3549 return asm_str_count (gimple_asm_string (stmt));
3551 case GIMPLE_RESX:
3552 /* This is either going to be an external function call with one
3553 argument, or two register copy statements plus a goto. */
3554 return 2;
3556 case GIMPLE_EH_DISPATCH:
3557 /* ??? This is going to turn into a switch statement. Ideally
3558 we'd have a look at the eh region and estimate the number of
3559 edges involved. */
3560 return 10;
3562 case GIMPLE_BIND:
3563 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3565 case GIMPLE_EH_FILTER:
3566 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3568 case GIMPLE_CATCH:
3569 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3571 case GIMPLE_TRY:
3572 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3573 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3575 /* OpenMP directives are generally very expensive. */
3577 case GIMPLE_OMP_RETURN:
3578 case GIMPLE_OMP_SECTIONS_SWITCH:
3579 case GIMPLE_OMP_ATOMIC_STORE:
3580 case GIMPLE_OMP_CONTINUE:
3581 /* ...except these, which are cheap. */
3582 return 0;
3584 case GIMPLE_OMP_ATOMIC_LOAD:
3585 return weights->omp_cost;
3587 case GIMPLE_OMP_FOR:
3588 return (weights->omp_cost
3589 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3590 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3592 case GIMPLE_OMP_PARALLEL:
3593 case GIMPLE_OMP_TASK:
3594 case GIMPLE_OMP_CRITICAL:
3595 case GIMPLE_OMP_MASTER:
3596 case GIMPLE_OMP_ORDERED:
3597 case GIMPLE_OMP_SECTION:
3598 case GIMPLE_OMP_SECTIONS:
3599 case GIMPLE_OMP_SINGLE:
3600 return (weights->omp_cost
3601 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3603 default:
3604 gcc_unreachable ();
3607 return cost;
3610 /* Estimate number of instructions that will be created by expanding
3611 function FNDECL. WEIGHTS contains weights attributed to various
3612 constructs. */
3615 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3617 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3618 gimple_stmt_iterator bsi;
3619 basic_block bb;
3620 int n = 0;
3622 gcc_assert (my_function && my_function->cfg);
3623 FOR_EACH_BB_FN (bb, my_function)
3625 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3626 n += estimate_num_insns (gsi_stmt (bsi), weights);
3629 return n;
3633 /* Initializes weights used by estimate_num_insns. */
3635 void
3636 init_inline_once (void)
3638 eni_size_weights.call_cost = 1;
3639 eni_size_weights.target_builtin_call_cost = 1;
3640 eni_size_weights.div_mod_cost = 1;
3641 eni_size_weights.omp_cost = 40;
3642 eni_size_weights.time_based = false;
3644 /* Estimating the time for a call is difficult, since we have no idea what the
3645 called function does. In the current uses of eni_time_weights,
3646 underestimating the cost does less harm than overestimating it, so
3647 we choose a rather small value here. */
3648 eni_time_weights.call_cost = 10;
3649 eni_time_weights.target_builtin_call_cost = 10;
3650 eni_time_weights.div_mod_cost = 10;
3651 eni_time_weights.omp_cost = 40;
3652 eni_time_weights.time_based = true;
3655 /* Estimate the number of instructions in a gimple_seq. */
3658 count_insns_seq (gimple_seq seq, eni_weights *weights)
3660 gimple_stmt_iterator gsi;
3661 int n = 0;
3662 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3663 n += estimate_num_insns (gsi_stmt (gsi), weights);
3665 return n;
3669 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3671 static void
3672 prepend_lexical_block (tree current_block, tree new_block)
3674 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3675 BLOCK_SUBBLOCKS (current_block) = new_block;
3676 BLOCK_SUPERCONTEXT (new_block) = current_block;
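/* Sketch of the effect, with hypothetical blocks: if CURRENT_BLOCK already has
   subblocks B1 -> B2, prepending NEW_BLOCK yields the subblock chain
   NEW_BLOCK -> B1 -> B2, and NEW_BLOCK's BLOCK_SUPERCONTEXT is set to
   CURRENT_BLOCK.  */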
3679 /* Add local variables from CALLEE to CALLER. */
3681 static inline void
3682 add_local_variables (struct function *callee, struct function *caller,
3683 copy_body_data *id, bool check_var_ann)
3685 tree var;
3686 unsigned ix;
3688 FOR_EACH_LOCAL_DECL (callee, ix, var)
3689 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3691 if (!check_var_ann
3692 || (var_ann (var) && add_referenced_var (var)))
3693 add_local_decl (caller, var);
3695 else if (!can_be_nonlocal (var, id))
3697 tree new_var = remap_decl (var, id);
3699 /* Remap debug-expressions. */
3700 if (TREE_CODE (new_var) == VAR_DECL
3701 && DECL_DEBUG_EXPR_IS_FROM (new_var)
3702 && new_var != var)
3704 tree tem = DECL_DEBUG_EXPR (var);
3705 bool old_regimplify = id->regimplify;
3706 id->remapping_type_depth++;
3707 walk_tree (&tem, copy_tree_body_r, id, NULL);
3708 id->remapping_type_depth--;
3709 id->regimplify = old_regimplify;
3710 SET_DECL_DEBUG_EXPR (new_var, tem);
3712 add_local_decl (caller, new_var);
3716 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3718 static bool
3719 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3721 tree use_retvar;
3722 tree fn;
3723 struct pointer_map_t *st, *dst;
3724 tree return_slot;
3725 tree modify_dest;
3726 location_t saved_location;
3727 struct cgraph_edge *cg_edge;
3728 cgraph_inline_failed_t reason;
3729 basic_block return_block;
3730 edge e;
3731 gimple_stmt_iterator gsi, stmt_gsi;
3732 bool successfully_inlined = FALSE;
3733 bool purge_dead_abnormal_edges;
3735 /* Set input_location here so we get the right instantiation context
3736 if we call instantiate_decl from inlinable_function_p. */
3737 saved_location = input_location;
3738 if (gimple_has_location (stmt))
3739 input_location = gimple_location (stmt);
3741 /* From here on, we're only interested in CALL_EXPRs. */
3742 if (gimple_code (stmt) != GIMPLE_CALL)
3743 goto egress;
3745 /* First, see if we can figure out what function is being called.
3746 If we cannot, then there is no hope of inlining the function. */
3747 fn = gimple_call_fndecl (stmt);
3748 if (!fn)
3749 goto egress;
3751 /* Turn forward declarations into real ones. */
3752 fn = cgraph_node (fn)->decl;
3754 /* If FN is a declaration of a function in a nested scope that was
3755 globally declared inline, we don't set its DECL_INITIAL.
3756 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3757 C++ front-end uses it for cdtors to refer to their internal
3758      declarations, which are not real functions.  Fortunately those
3759 don't have trees to be saved, so we can tell by checking their
3760 gimple_body. */
3761 if (!DECL_INITIAL (fn)
3762 && DECL_ABSTRACT_ORIGIN (fn)
3763 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3764 fn = DECL_ABSTRACT_ORIGIN (fn);
3766   /* Objective-C and Fortran still call tree_rest_of_compilation directly.
3767      Kill this check once that is fixed.  */
3768 if (!id->dst_node->analyzed)
3769 goto egress;
3771 cg_edge = cgraph_edge (id->dst_node, stmt);
3773 /* First check that inlining isn't simply forbidden in this case. */
3774 if (inline_forbidden_into_p (cg_edge->caller->decl, cg_edge->callee->decl))
3775 goto egress;
3777 /* Don't try to inline functions that are not well-suited to inlining. */
3778 if (!cgraph_inline_p (cg_edge, &reason))
3780 /* If this call was originally indirect, we do not want to emit any
3781	 inlining-related warnings or sorry messages because there are no
3782 guarantees regarding those. */
3783 if (cg_edge->indirect_inlining_edge)
3784 goto egress;
3786 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3787 /* Avoid warnings during early inline pass. */
3788 && cgraph_global_info_ready)
3790 sorry ("inlining failed in call to %q+F: %s", fn,
3791 _(cgraph_inline_failed_string (reason)));
3792 sorry ("called from here");
3794 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3795 && !DECL_IN_SYSTEM_HEADER (fn)
3796 && reason != CIF_UNSPECIFIED
3797 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3798 /* Avoid warnings during early inline pass. */
3799 && cgraph_global_info_ready)
3801 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3802 fn, _(cgraph_inline_failed_string (reason)));
3803 warning (OPT_Winline, "called from here");
3805 goto egress;
3807 fn = cg_edge->callee->decl;
3809 #ifdef ENABLE_CHECKING
3810 if (cg_edge->callee->decl != id->dst_node->decl)
3811 verify_cgraph_node (cg_edge->callee);
3812 #endif
3814 /* We will be inlining this callee. */
3815 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3817   /* Update the caller's EH personality.  */
3818 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3819 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3820 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3822 /* Split the block holding the GIMPLE_CALL. */
3823 e = split_block (bb, stmt);
3824 bb = e->src;
3825 return_block = e->dest;
3826 remove_edge (e);
3828 /* split_block splits after the statement; work around this by
3829 moving the call into the second block manually. Not pretty,
3830 but seems easier than doing the CFG manipulation by hand
3831 when the GIMPLE_CALL is in the last statement of BB. */
3832 stmt_gsi = gsi_last_bb (bb);
3833 gsi_remove (&stmt_gsi, false);
3835 /* If the GIMPLE_CALL was in the last statement of BB, it may have
3836 been the source of abnormal edges. In this case, schedule
3837 the removal of dead abnormal edges. */
3838 gsi = gsi_start_bb (return_block);
3839 if (gsi_end_p (gsi))
3841 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3842 purge_dead_abnormal_edges = true;
3844 else
3846 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3847 purge_dead_abnormal_edges = false;
3850 stmt_gsi = gsi_start_bb (return_block);
3852 /* Build a block containing code to initialize the arguments, the
3853 actual inline expansion of the body, and a label for the return
3854 statements within the function to jump to. The type of the
3855 statement expression is the return type of the function call. */
3856 id->block = make_node (BLOCK);
3857 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3858 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3859 prepend_lexical_block (gimple_block (stmt), id->block);
3861 /* Local declarations will be replaced by their equivalents in this
3862 map. */
3863 st = id->decl_map;
3864 id->decl_map = pointer_map_create ();
3865 dst = id->debug_map;
3866 id->debug_map = NULL;
3868 /* Record the function we are about to inline. */
3869 id->src_fn = fn;
3870 id->src_node = cg_edge->callee;
3871 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3872 id->gimple_call = stmt;
3874 gcc_assert (!id->src_cfun->after_inlining);
3876 id->entry_bb = bb;
3877 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3879 gimple_stmt_iterator si = gsi_last_bb (bb);
3880 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3881 NOT_TAKEN),
3882 GSI_NEW_STMT);
3884 initialize_inlined_parameters (id, stmt, fn, bb);
3886 if (DECL_INITIAL (fn))
3887 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3889 /* Return statements in the function body will be replaced by jumps
3890 to the RET_LABEL. */
3891 gcc_assert (DECL_INITIAL (fn));
3892 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3894 /* Find the LHS to which the result of this call is assigned. */
3895 return_slot = NULL;
3896 if (gimple_call_lhs (stmt))
3898 modify_dest = gimple_call_lhs (stmt);
3900 /* The function which we are inlining might not return a value,
3901 in which case we should issue a warning that the function
3902 does not return a value. In that case the optimizers will
3903 see that the variable to which the value is assigned was not
3904 initialized. We do not want to issue a warning about that
3905 uninitialized variable. */
3906 if (DECL_P (modify_dest))
3907 TREE_NO_WARNING (modify_dest) = 1;
3909 if (gimple_call_return_slot_opt_p (stmt))
3911 return_slot = modify_dest;
3912 modify_dest = NULL;
3915 else
3916 modify_dest = NULL;
3918 /* If we are inlining a call to the C++ operator new, we don't want
3919 to use type based alias analysis on the return value. Otherwise
3920 we may get confused if the compiler sees that the inlined new
3921 function returns a pointer which was just deleted. See bug
3922 33407. */
3923 if (DECL_IS_OPERATOR_NEW (fn))
3925 return_slot = NULL;
3926 modify_dest = NULL;
3929 /* Declare the return variable for the function. */
3930 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
3932 /* Add local vars in this inlined callee to caller. */
3933 add_local_variables (id->src_cfun, cfun, id, true);
3935 if (dump_file && (dump_flags & TDF_DETAILS))
3937 fprintf (dump_file, "Inlining ");
3938 print_generic_expr (dump_file, id->src_fn, 0);
3939 fprintf (dump_file, " to ");
3940 print_generic_expr (dump_file, id->dst_fn, 0);
3941 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3944 /* This is it. Duplicate the callee body. Assume callee is
3945 pre-gimplified. Note that we must not alter the caller
3946 function in any way before this point, as this CALL_EXPR may be
3947 a self-referential call; if we're calling ourselves, we need to
3948 duplicate our body before altering anything. */
3949 copy_body (id, bb->count,
3950 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3951 bb, return_block, NULL, NULL);
3953 /* Reset the escaped solution. */
3954 if (cfun->gimple_df)
3955 pt_solution_reset (&cfun->gimple_df->escaped);
3957 /* Clean up. */
3958 if (id->debug_map)
3960 pointer_map_destroy (id->debug_map);
3961 id->debug_map = dst;
3963 pointer_map_destroy (id->decl_map);
3964 id->decl_map = st;
3966   /* Unlink the call's virtual operands before replacing it.  */
3967 unlink_stmt_vdef (stmt);
3969 /* If the inlined function returns a result that we care about,
3970 substitute the GIMPLE_CALL with an assignment of the return
3971 variable to the LHS of the call. That is, if STMT was
3972 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3973 if (use_retvar && gimple_call_lhs (stmt))
3975 gimple old_stmt = stmt;
3976 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3977 gsi_replace (&stmt_gsi, stmt, false);
3978 if (gimple_in_ssa_p (cfun))
3979 mark_symbols_for_renaming (stmt);
3980 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3982 else
3984 /* Handle the case of inlining a function with no return
3985 statement, which causes the return value to become undefined. */
3986 if (gimple_call_lhs (stmt)
3987 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3989 tree name = gimple_call_lhs (stmt);
3990 tree var = SSA_NAME_VAR (name);
3991 tree def = gimple_default_def (cfun, var);
3993 if (def)
3995 /* If the variable is used undefined, make this name
3996 undefined via a move. */
3997 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3998 gsi_replace (&stmt_gsi, stmt, true);
4000 else
4002 /* Otherwise make this variable undefined. */
4003 gsi_remove (&stmt_gsi, true);
4004 set_default_def (var, name);
4005 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4008 else
4009 gsi_remove (&stmt_gsi, true);
4012 if (purge_dead_abnormal_edges)
4014 gimple_purge_dead_eh_edges (return_block);
4015 gimple_purge_dead_abnormal_call_edges (return_block);
4018 /* If the value of the new expression is ignored, that's OK. We
4019 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4020 the equivalent inlined version either. */
4021 if (is_gimple_assign (stmt))
4023 gcc_assert (gimple_assign_single_p (stmt)
4024 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4025 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4028 /* Output the inlining info for this abstract function, since it has been
4029 inlined. If we don't do this now, we can lose the information about the
4030 variables in the function when the blocks get blown away as soon as we
4031 remove the cgraph node. */
4032 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4034 /* Update callgraph if needed. */
4035 cgraph_remove_node (cg_edge->callee);
4037 id->block = NULL_TREE;
4038 successfully_inlined = TRUE;
4040 egress:
4041 input_location = saved_location;
4042 return successfully_inlined;
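/* For illustration only -- a rough, hand-written sketch of the effect of
   expand_call_inline, not taken from a real dump.  Given a caller
   statement

       a_1 = foo (x_2);

   where foo is  int foo (int n) { return n + 1; },  the caller is left
   with, roughly,

       n_3 = x_2;        <- initialize_inlined_parameters
       D_4 = n_3 + 1;    <- the copied body; the return became an
                            assignment to the return variable
       a_1 = D_4;        <- the GIMPLE_CALL replaced as described above

   The SSA names here are invented; the real transformation also splits
   the basic block, remaps BLOCKs and updates the callgraph as done in the
   code above.  */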
4045 /* Expand the call statements in basic block BB; return true if one of
4046    them was inlined.  We can only have CALL_EXPRs as the "toplevel" tree
4047    code or nested in a MODIFY_EXPR.  See gimple.c:get_call_expr_in().
4048    Unfortunately we cannot use that function here because we need a
4049    pointer to the CALL_EXPR, not the tree itself.  */
4051 static bool
4052 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4054 gimple_stmt_iterator gsi;
4056 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4058 gimple stmt = gsi_stmt (gsi);
4060 if (is_gimple_call (stmt)
4061 && expand_call_inline (bb, stmt, id))
4062 return true;
4065 return false;
4069 /* Walk all basic blocks created after FIRST and try to fold every statement
4070 in the STATEMENTS pointer set. */
4072 static void
4073 fold_marked_statements (int first, struct pointer_set_t *statements)
4075 for (; first < n_basic_blocks; first++)
4076 if (BASIC_BLOCK (first))
4078 gimple_stmt_iterator gsi;
4080 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4081 !gsi_end_p (gsi);
4082 gsi_next (&gsi))
4083 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4085 gimple old_stmt = gsi_stmt (gsi);
4086 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4088 if (old_decl && DECL_BUILT_IN (old_decl))
4090 /* Folding builtins can create multiple instructions,
4091 we need to look at all of them. */
4092 gimple_stmt_iterator i2 = gsi;
4093 gsi_prev (&i2);
4094 if (fold_stmt (&gsi))
4096 gimple new_stmt;
4097 if (gsi_end_p (i2))
4098 i2 = gsi_start_bb (BASIC_BLOCK (first));
4099 else
4100 gsi_next (&i2);
4101 while (1)
4103 new_stmt = gsi_stmt (i2);
4104 update_stmt (new_stmt);
4105 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4106 new_stmt);
4108 if (new_stmt == gsi_stmt (gsi))
4110		  /* It is okay to check only the very last of
4111		     these statements.  If it is a throwing
4112		     statement nothing will change.  If it isn't,
4113		     this can remove EH edges.  The only way that
4114		     would be wrong is if some intermediate
4115		     statement threw but the last one did not;
4116		     that would mean we'd have to split the block,
4117		     which we can't do here and we'd lose anyway.
4118		     And as builtins probably never throw, this
4119		     is all moot anyway.  */
4120 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4121 new_stmt))
4122 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4123 break;
4125 gsi_next (&i2);
4129 else if (fold_stmt (&gsi))
4131 /* Re-read the statement from GSI as fold_stmt() may
4132 have changed it. */
4133 gimple new_stmt = gsi_stmt (gsi);
4134 update_stmt (new_stmt);
4136 if (is_gimple_call (old_stmt)
4137 || is_gimple_call (new_stmt))
4138 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4139 new_stmt);
4141 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4142 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
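/* For illustration only: one hedged example of why folding a marked
   builtin can produce more than one statement.  A call such as

       __builtin_memcpy (&d, &s, 4);

   may fold into an ordinary assignment (plus, depending on the operand
   types, extra conversion statements), so the loop above rescans from
   the statement just before the fold point and updates callgraph edges
   and EH information for every statement the fold produced.  The exact
   folded form depends on the target and on the types involved.  */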
4148 /* Return true if BB has at least one abnormal outgoing edge. */
4150 static inline bool
4151 has_abnormal_outgoing_edge_p (basic_block bb)
4153 edge e;
4154 edge_iterator ei;
4156 FOR_EACH_EDGE (e, ei, bb->succs)
4157 if (e->flags & EDGE_ABNORMAL)
4158 return true;
4160 return false;
4163 /* Expand calls to inline functions in the body of FN. */
4165 unsigned int
4166 optimize_inline_calls (tree fn)
4168 copy_body_data id;
4169 basic_block bb;
4170 int last = n_basic_blocks;
4171 struct gimplify_ctx gctx;
4172 bool inlined_p = false;
4174 /* There is no point in performing inlining if errors have already
4175 occurred -- and we might crash if we try to inline invalid
4176 code. */
4177 if (seen_error ())
4178 return 0;
4180 /* Clear out ID. */
4181 memset (&id, 0, sizeof (id));
4183 id.src_node = id.dst_node = cgraph_node (fn);
4184 id.dst_fn = fn;
4185 /* Or any functions that aren't finished yet. */
4186 if (current_function_decl)
4187 id.dst_fn = current_function_decl;
4189 id.copy_decl = copy_decl_maybe_to_var;
4190 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4191 id.transform_new_cfg = false;
4192 id.transform_return_to_modify = true;
4193 id.transform_lang_insert_block = NULL;
4194 id.statements_to_fold = pointer_set_create ();
4196 push_gimplify_context (&gctx);
4198 /* We make no attempts to keep dominance info up-to-date. */
4199 free_dominance_info (CDI_DOMINATORS);
4200 free_dominance_info (CDI_POST_DOMINATORS);
4202 /* Register specific gimple functions. */
4203 gimple_register_cfg_hooks ();
4205 /* Reach the trees by walking over the CFG, and note the
4206 enclosing basic-blocks in the call edges. */
4207 /* We walk the blocks going forward, because inlined function bodies
4208 will split id->current_basic_block, and the new blocks will
4209 follow it; we'll trudge through them, processing their CALL_EXPRs
4210 along the way. */
4211 FOR_EACH_BB (bb)
4212 inlined_p |= gimple_expand_calls_inline (bb, &id);
4214 pop_gimplify_context (NULL);
4216 #ifdef ENABLE_CHECKING
4218 struct cgraph_edge *e;
4220 verify_cgraph_node (id.dst_node);
4222 /* Double check that we inlined everything we are supposed to inline. */
4223 for (e = id.dst_node->callees; e; e = e->next_callee)
4224 gcc_assert (e->inline_failed);
4226 #endif
4228 /* Fold queued statements. */
4229 fold_marked_statements (last, id.statements_to_fold);
4230 pointer_set_destroy (id.statements_to_fold);
4232 gcc_assert (!id.debug_stmts);
4234 /* If we didn't inline into the function there is nothing to do. */
4235 if (!inlined_p)
4236 return 0;
4238 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4239 number_blocks (fn);
4241 delete_unreachable_blocks_update_callgraph (&id);
4242 #ifdef ENABLE_CHECKING
4243 verify_cgraph_node (id.dst_node);
4244 #endif
4246   /* It would be nice to check SSA/CFG/statement consistency here, but it is
4247      not possible yet - the IPA passes might mark various functions as not
4248      throwing without proactively updating local EH info.  This is done
4249      later in the fixup_cfg pass, which also executes the verification.  */
4250 return (TODO_update_ssa
4251 | TODO_cleanup_cfg
4252 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4253 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4254 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4257 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4259 tree
4260 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4262 enum tree_code code = TREE_CODE (*tp);
4263 enum tree_code_class cl = TREE_CODE_CLASS (code);
4265 /* We make copies of most nodes. */
4266 if (IS_EXPR_CODE_CLASS (cl)
4267 || code == TREE_LIST
4268 || code == TREE_VEC
4269 || code == TYPE_DECL
4270 || code == OMP_CLAUSE)
4272 /* Because the chain gets clobbered when we make a copy, we save it
4273 here. */
4274 tree chain = NULL_TREE, new_tree;
4276 chain = TREE_CHAIN (*tp);
4278 /* Copy the node. */
4279 new_tree = copy_node (*tp);
4281 /* Propagate mudflap marked-ness. */
4282 if (flag_mudflap && mf_marked_p (*tp))
4283 mf_mark (new_tree);
4285 *tp = new_tree;
4287 /* Now, restore the chain, if appropriate. That will cause
4288 walk_tree to walk into the chain as well. */
4289 if (code == PARM_DECL
4290 || code == TREE_LIST
4291 || code == OMP_CLAUSE)
4292 TREE_CHAIN (*tp) = chain;
4294 /* For now, we don't update BLOCKs when we make copies. So, we
4295 have to nullify all BIND_EXPRs. */
4296 if (TREE_CODE (*tp) == BIND_EXPR)
4297 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4299 else if (code == CONSTRUCTOR)
4301 /* CONSTRUCTOR nodes need special handling because
4302 we need to duplicate the vector of elements. */
4303 tree new_tree;
4305 new_tree = copy_node (*tp);
4307 /* Propagate mudflap marked-ness. */
4308 if (flag_mudflap && mf_marked_p (*tp))
4309 mf_mark (new_tree);
4311 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4312 CONSTRUCTOR_ELTS (*tp));
4313 *tp = new_tree;
4315 else if (TREE_CODE_CLASS (code) == tcc_type)
4316 *walk_subtrees = 0;
4317 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4318 *walk_subtrees = 0;
4319 else if (TREE_CODE_CLASS (code) == tcc_constant)
4320 *walk_subtrees = 0;
4321 else
4322 gcc_assert (code != STATEMENT_LIST);
4323 return NULL_TREE;
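/* A minimal usage sketch (hypothetical caller): copy_tree_r is meant to be
   driven by walk_tree, e.g. to deep-copy a GENERIC expression EXPR in
   place:  */
#if 0
  walk_tree (&expr, copy_tree_r, NULL, NULL);
#endif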
4326 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
4327    information indicating to what new SAVE_EXPR this one should be mapped,
4328    use that one.  Otherwise, create a new node and enter it in ST.  */
4331 static void
4332 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4334 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4335 tree *n;
4336 tree t;
4338 /* See if we already encountered this SAVE_EXPR. */
4339 n = (tree *) pointer_map_contains (st, *tp);
4341 /* If we didn't already remap this SAVE_EXPR, do so now. */
4342 if (!n)
4344 t = copy_node (*tp);
4346 /* Remember this SAVE_EXPR. */
4347 *pointer_map_insert (st, *tp) = t;
4348 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4349 *pointer_map_insert (st, t) = t;
4351 else
4353 /* We've already walked into this SAVE_EXPR; don't do it again. */
4354 *walk_subtrees = 0;
4355 t = *n;
4358 /* Replace this SAVE_EXPR with the copy. */
4359 *tp = t;
4362 /* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local label,
4363    copies the declaration and enters it in the decl map in DATA (which is
4364    really a `copy_body_data *').  */
4366 static tree
4367 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4368 void *data)
4370 copy_body_data *id = (copy_body_data *) data;
4372 /* Don't walk into types. */
4373 if (TYPE_P (*tp))
4374 *walk_subtrees = 0;
4376 else if (TREE_CODE (*tp) == LABEL_EXPR)
4378 tree decl = TREE_OPERAND (*tp, 0);
4380 /* Copy the decl and remember the copy. */
4381 insert_decl_map (id, decl, id->copy_decl (decl, id));
4384 return NULL_TREE;
4387 /* Perform any modifications to EXPR required when it is unsaved. Does
4388 not recurse into EXPR's subtrees. */
4390 static void
4391 unsave_expr_1 (tree expr)
4393 switch (TREE_CODE (expr))
4395 case TARGET_EXPR:
4396 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4397 It's OK for this to happen if it was part of a subtree that
4398 isn't immediately expanded, such as operand 2 of another
4399 TARGET_EXPR. */
4400 if (TREE_OPERAND (expr, 1))
4401 break;
4403 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4404 TREE_OPERAND (expr, 3) = NULL_TREE;
4405 break;
4407 default:
4408 break;
4412 /* Called via walk_tree when an expression is unsaved.  Using the decl
4413    map of the `copy_body_data' in DATA, remaps all local declarations
4414    to appropriate replacements.  */
4416 static tree
4417 unsave_r (tree *tp, int *walk_subtrees, void *data)
4419 copy_body_data *id = (copy_body_data *) data;
4420 struct pointer_map_t *st = id->decl_map;
4421 tree *n;
4423 /* Only a local declaration (variable or label). */
4424 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4425 || TREE_CODE (*tp) == LABEL_DECL)
4427 /* Lookup the declaration. */
4428 n = (tree *) pointer_map_contains (st, *tp);
4430 /* If it's there, remap it. */
4431 if (n)
4432 *tp = *n;
4435 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4436 gcc_unreachable ();
4437 else if (TREE_CODE (*tp) == BIND_EXPR)
4438 copy_bind_expr (tp, walk_subtrees, id);
4439 else if (TREE_CODE (*tp) == SAVE_EXPR
4440 || TREE_CODE (*tp) == TARGET_EXPR)
4441 remap_save_expr (tp, st, walk_subtrees);
4442 else
4444 copy_tree_r (tp, walk_subtrees, NULL);
4446 /* Do whatever unsaving is required. */
4447 unsave_expr_1 (*tp);
4450 /* Keep iterating. */
4451 return NULL_TREE;
4454 /* Copies everything in EXPR and replaces variables, labels
4455 and SAVE_EXPRs local to EXPR. */
4457 tree
4458 unsave_expr_now (tree expr)
4460 copy_body_data id;
4462 /* There's nothing to do for NULL_TREE. */
4463 if (expr == 0)
4464 return expr;
4466 /* Set up ID. */
4467 memset (&id, 0, sizeof (id));
4468 id.src_fn = current_function_decl;
4469 id.dst_fn = current_function_decl;
4470 id.decl_map = pointer_map_create ();
4471 id.debug_map = NULL;
4473 id.copy_decl = copy_decl_no_change;
4474 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4475 id.transform_new_cfg = false;
4476 id.transform_return_to_modify = false;
4477 id.transform_lang_insert_block = NULL;
4479 /* Walk the tree once to find local labels. */
4480 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4482 /* Walk the tree again, copying, remapping, and unsaving. */
4483 walk_tree (&expr, unsave_r, &id, NULL);
4485 /* Clean up. */
4486 pointer_map_destroy (id.decl_map);
4487 if (id.debug_map)
4488 pointer_map_destroy (id.debug_map);
4490 return expr;
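/* A minimal usage sketch (hypothetical caller), assuming EXPR is a GENERIC
   tree in the current function:  */
#if 0
  tree copy = unsave_expr_now (expr);
  /* COPY is a deep copy in which local labels and SAVE_EXPRs have been
     replaced, so it can be expanded independently of EXPR.  */
#endif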
4493 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
4494    label, copies the declaration and enters it in the decl map in WI->info
4495    (which is really a `copy_body_data *').  */
4497 static tree
4498 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4499 bool *handled_ops_p ATTRIBUTE_UNUSED,
4500 struct walk_stmt_info *wi)
4502 copy_body_data *id = (copy_body_data *) wi->info;
4503 gimple stmt = gsi_stmt (*gsip);
4505 if (gimple_code (stmt) == GIMPLE_LABEL)
4507 tree decl = gimple_label_label (stmt);
4509 /* Copy the decl and remember the copy. */
4510 insert_decl_map (id, decl, id->copy_decl (decl, id));
4513 return NULL_TREE;
4517 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4518    Using the decl map of the `copy_body_data' in WI->info, remaps all
4519    local declarations to appropriate replacements in gimple
4520    operands. */
4522 static tree
4523 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4525 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4526 copy_body_data *id = (copy_body_data *) wi->info;
4527 struct pointer_map_t *st = id->decl_map;
4528 tree *n;
4529 tree expr = *tp;
4531 /* Only a local declaration (variable or label). */
4532 if ((TREE_CODE (expr) == VAR_DECL
4533 && !TREE_STATIC (expr))
4534 || TREE_CODE (expr) == LABEL_DECL)
4536 /* Lookup the declaration. */
4537 n = (tree *) pointer_map_contains (st, expr);
4539 /* If it's there, remap it. */
4540 if (n)
4541 *tp = *n;
4542 *walk_subtrees = 0;
4544 else if (TREE_CODE (expr) == STATEMENT_LIST
4545 || TREE_CODE (expr) == BIND_EXPR
4546 || TREE_CODE (expr) == SAVE_EXPR)
4547 gcc_unreachable ();
4548 else if (TREE_CODE (expr) == TARGET_EXPR)
4550 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4551 It's OK for this to happen if it was part of a subtree that
4552 isn't immediately expanded, such as operand 2 of another
4553 TARGET_EXPR. */
4554 if (!TREE_OPERAND (expr, 1))
4556 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4557 TREE_OPERAND (expr, 3) = NULL_TREE;
4561 /* Keep iterating. */
4562 return NULL_TREE;
4566 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4567    Using the decl map of the `copy_body_data' in WI->info, remaps all
4568    local declarations to appropriate replacements in gimple
4569    statements. */
4571 static tree
4572 replace_locals_stmt (gimple_stmt_iterator *gsip,
4573 bool *handled_ops_p ATTRIBUTE_UNUSED,
4574 struct walk_stmt_info *wi)
4576 copy_body_data *id = (copy_body_data *) wi->info;
4577 gimple stmt = gsi_stmt (*gsip);
4579 if (gimple_code (stmt) == GIMPLE_BIND)
4581 tree block = gimple_bind_block (stmt);
4583 if (block)
4585 remap_block (&block, id);
4586 gimple_bind_set_block (stmt, block);
4589 /* This will remap a lot of the same decls again, but this should be
4590 harmless. */
4591 if (gimple_bind_vars (stmt))
4592 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4595 /* Keep iterating. */
4596 return NULL_TREE;
4600 /* Copies everything in SEQ and replaces variables and labels local to
4601 current_function_decl. */
4603 gimple_seq
4604 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4606 copy_body_data id;
4607 struct walk_stmt_info wi;
4608 struct pointer_set_t *visited;
4609 gimple_seq copy;
4611   /* There's nothing to do for a NULL sequence.  */
4612 if (seq == NULL)
4613 return seq;
4615 /* Set up ID. */
4616 memset (&id, 0, sizeof (id));
4617 id.src_fn = current_function_decl;
4618 id.dst_fn = current_function_decl;
4619 id.decl_map = pointer_map_create ();
4620 id.debug_map = NULL;
4622 id.copy_decl = copy_decl_no_change;
4623 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4624 id.transform_new_cfg = false;
4625 id.transform_return_to_modify = false;
4626 id.transform_lang_insert_block = NULL;
4628 /* Walk the tree once to find local labels. */
4629 memset (&wi, 0, sizeof (wi));
4630 visited = pointer_set_create ();
4631 wi.info = &id;
4632 wi.pset = visited;
4633 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4634 pointer_set_destroy (visited);
4636 copy = gimple_seq_copy (seq);
4638 /* Walk the copy, remapping decls. */
4639 memset (&wi, 0, sizeof (wi));
4640 wi.info = &id;
4641 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4643 /* Clean up. */
4644 pointer_map_destroy (id.decl_map);
4645 if (id.debug_map)
4646 pointer_map_destroy (id.debug_map);
4648 return copy;
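/* A minimal usage sketch (hypothetical caller), assuming SEQ is a
   gimple_seq belonging to current_function_decl:  */
#if 0
  gimple_seq copy = copy_gimple_seq_and_replace_locals (seq);
  /* COPY uses fresh copies of the labels and GIMPLE_BIND-local variables
     of SEQ; decls from other functions and statics stay shared with the
     original.  */
#endif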
4652 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4654 static tree
4655 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4657 if (*tp == data)
4658 return (tree) data;
4659 else
4660 return NULL;
4663 DEBUG_FUNCTION bool
4664 debug_find_tree (tree top, tree search)
4666 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4670 /* Declare the variables created by the inliner.  Add all the variables in
4671    VARS to BLOCK.  */
4673 static void
4674 declare_inline_vars (tree block, tree vars)
4676 tree t;
4677 for (t = vars; t; t = DECL_CHAIN (t))
4679 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4680 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4681 add_local_decl (cfun, t);
4684 if (block)
4685 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4688 /* Finish off the copy COPY of declaration DECL: set up its debug
4689    information, clear its RTL and give it the proper DECL_CONTEXT for
4690    the destination function, then return it.  */
4692 static tree
4693 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4695   /* Don't generate debug information for the copy if we wouldn't have
4696      generated it for the original declaration either.  */
4697 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4698 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4700 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4701 declaration inspired this copy. */
4702 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4704 /* The new variable/label has no RTL, yet. */
4705 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4706 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4707 SET_DECL_RTL (copy, 0);
4709 /* These args would always appear unused, if not for this. */
4710 TREE_USED (copy) = 1;
4712 /* Set the context for the new declaration. */
4713 if (!DECL_CONTEXT (decl))
4714 /* Globals stay global. */
4716 else if (DECL_CONTEXT (decl) != id->src_fn)
4717 /* Things that weren't in the scope of the function we're inlining
4718 from aren't in the scope we're inlining to, either. */
4720 else if (TREE_STATIC (decl))
4721 /* Function-scoped static variables should stay in the original
4722 function. */
4724 else
4725 /* Ordinary automatic local variables are now in the scope of the
4726 new function. */
4727 DECL_CONTEXT (copy) = id->dst_fn;
4729 return copy;
4732 static tree
4733 copy_decl_to_var (tree decl, copy_body_data *id)
4735 tree copy, type;
4737 gcc_assert (TREE_CODE (decl) == PARM_DECL
4738 || TREE_CODE (decl) == RESULT_DECL);
4740 type = TREE_TYPE (decl);
4742 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4743 VAR_DECL, DECL_NAME (decl), type);
4744 if (DECL_PT_UID_SET_P (decl))
4745 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4746 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4747 TREE_READONLY (copy) = TREE_READONLY (decl);
4748 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4749 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4751 return copy_decl_for_dup_finish (id, decl, copy);
4754 /* Like copy_decl_to_var, but create a return slot object instead of a
4755 pointer variable for return by invisible reference. */
4757 static tree
4758 copy_result_decl_to_var (tree decl, copy_body_data *id)
4760 tree copy, type;
4762 gcc_assert (TREE_CODE (decl) == PARM_DECL
4763 || TREE_CODE (decl) == RESULT_DECL);
4765 type = TREE_TYPE (decl);
4766 if (DECL_BY_REFERENCE (decl))
4767 type = TREE_TYPE (type);
4769 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4770 VAR_DECL, DECL_NAME (decl), type);
4771 if (DECL_PT_UID_SET_P (decl))
4772 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4773 TREE_READONLY (copy) = TREE_READONLY (decl);
4774 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4775 if (!DECL_BY_REFERENCE (decl))
4777 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4778 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4781 return copy_decl_for_dup_finish (id, decl, copy);
4784 tree
4785 copy_decl_no_change (tree decl, copy_body_data *id)
4787 tree copy;
4789 copy = copy_node (decl);
4791 /* The COPY is not abstract; it will be generated in DST_FN. */
4792 DECL_ABSTRACT (copy) = 0;
4793 lang_hooks.dup_lang_specific_decl (copy);
4795 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4796 been taken; it's for internal bookkeeping in expand_goto_internal. */
4797 if (TREE_CODE (copy) == LABEL_DECL)
4799 TREE_ADDRESSABLE (copy) = 0;
4800 LABEL_DECL_UID (copy) = -1;
4803 return copy_decl_for_dup_finish (id, decl, copy);
4806 static tree
4807 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4809 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4810 return copy_decl_to_var (decl, id);
4811 else
4812 return copy_decl_no_change (decl, id);
4815 /* Return a copy of the function's argument tree. */
4816 static tree
4817 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4818 bitmap args_to_skip, tree *vars)
4820 tree arg, *parg;
4821 tree new_parm = NULL;
4822 int i = 0;
4824 parg = &new_parm;
4826 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
4827 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4829 tree new_tree = remap_decl (arg, id);
4830 lang_hooks.dup_lang_specific_decl (new_tree);
4831 *parg = new_tree;
4832 parg = &DECL_CHAIN (new_tree);
4834 else if (!pointer_map_contains (id->decl_map, arg))
4836 	/* Make an equivalent VAR_DECL.  If the argument was used
4837 	   as a temporary variable later in the function, the uses will
4838 	   be replaced by the local variable.  */
4839 tree var = copy_decl_to_var (arg, id);
4840 get_var_ann (var);
4841 add_referenced_var (var);
4842 insert_decl_map (id, arg, var);
4843 /* Declare this new variable. */
4844 DECL_CHAIN (var) = *vars;
4845 *vars = var;
4847 return new_parm;
4850 /* Return a copy of the function's static chain. */
4851 static tree
4852 copy_static_chain (tree static_chain, copy_body_data * id)
4854 tree *chain_copy, *pvar;
4856 chain_copy = &static_chain;
4857 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
4859 tree new_tree = remap_decl (*pvar, id);
4860 lang_hooks.dup_lang_specific_decl (new_tree);
4861 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
4862 *pvar = new_tree;
4864 return static_chain;
4867 /* Return true if the function is allowed to be versioned.
4868 This is a guard for the versioning functionality. */
4870 bool
4871 tree_versionable_function_p (tree fndecl)
4873 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4874 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
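/* For illustration: versioning can be vetoed explicitly by the user, e.g.
   with

       int f (int) __attribute__ ((noclone));

   in which case this predicate returns false; copy_forbidden rejects the
   remaining bodies that cannot be safely duplicated.  */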
4877 /* Delete all unreachable basic blocks and update callgraph.
4878 Doing so is somewhat nontrivial because we need to update all clones and
4879    remove inline functions that become unreachable.  */
4881 static bool
4882 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4884 bool changed = false;
4885 basic_block b, next_bb;
4887 find_unreachable_blocks ();
4889 /* Delete all unreachable basic blocks. */
4891 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4893 next_bb = b->next_bb;
4895 if (!(b->flags & BB_REACHABLE))
4897 gimple_stmt_iterator bsi;
4899 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4900 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4902 struct cgraph_edge *e;
4903 struct cgraph_node *node;
4905 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4907 if (!e->inline_failed)
4908 cgraph_remove_node_and_inline_clones (e->callee);
4909 else
4910 cgraph_remove_edge (e);
4912 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4913 && id->dst_node->clones)
4914 for (node = id->dst_node->clones; node != id->dst_node;)
4916 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4918 if (!e->inline_failed)
4919 cgraph_remove_node_and_inline_clones (e->callee);
4920 else
4921 cgraph_remove_edge (e);
4924 if (node->clones)
4925 node = node->clones;
4926 else if (node->next_sibling_clone)
4927 node = node->next_sibling_clone;
4928 else
4930 while (node != id->dst_node && !node->next_sibling_clone)
4931 node = node->clone_of;
4932 if (node != id->dst_node)
4933 node = node->next_sibling_clone;
4937 delete_basic_block (b);
4938 changed = true;
4942 if (changed)
4943 tidy_fallthru_edges ();
4944 return changed;
4947 /* Update clone info after duplication. */
4949 static void
4950 update_clone_info (copy_body_data * id)
4952 struct cgraph_node *node;
4953 if (!id->dst_node->clones)
4954 return;
4955 for (node = id->dst_node->clones; node != id->dst_node;)
4957 /* First update replace maps to match the new body. */
4958 if (node->clone.tree_map)
4960 unsigned int i;
4961 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4963 struct ipa_replace_map *replace_info;
4964 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4965 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4966 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4969 if (node->clones)
4970 node = node->clones;
4971 else if (node->next_sibling_clone)
4972 node = node->next_sibling_clone;
4973 else
4975 while (node != id->dst_node && !node->next_sibling_clone)
4976 node = node->clone_of;
4977 if (node != id->dst_node)
4978 node = node->next_sibling_clone;
4983 /* Create a copy of a function's tree.
4984 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4985 of the original function and the new copied function
4986 respectively. In case we want to replace a DECL
4987 tree with another tree while duplicating the function's
4988 body, TREE_MAP represents the mapping between these
4989 trees. If UPDATE_CLONES is set, the call_stmt fields
4990 of edges of clones of the function will be updated.
4992    If non-NULL, ARGS_TO_SKIP determines which function parameters to
4993    remove from the new version.
4994    If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
4995    If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
4997 void
4998 tree_function_versioning (tree old_decl, tree new_decl,
4999 VEC(ipa_replace_map_p,gc)* tree_map,
5000 bool update_clones, bitmap args_to_skip,
5001 bitmap blocks_to_copy, basic_block new_entry)
5003 struct cgraph_node *old_version_node;
5004 struct cgraph_node *new_version_node;
5005 copy_body_data id;
5006 tree p;
5007 unsigned i;
5008 struct ipa_replace_map *replace_info;
5009 basic_block old_entry_block, bb;
5010 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
5012 tree old_current_function_decl = current_function_decl;
5013 tree vars = NULL_TREE;
5015 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5016 && TREE_CODE (new_decl) == FUNCTION_DECL);
5017 DECL_POSSIBLY_INLINED (old_decl) = 1;
5019 old_version_node = cgraph_node (old_decl);
5020 new_version_node = cgraph_node (new_decl);
5022 /* Output the inlining info for this abstract function, since it has been
5023 inlined. If we don't do this now, we can lose the information about the
5024 variables in the function when the blocks get blown away as soon as we
5025 remove the cgraph node. */
5026 (*debug_hooks->outlining_inline_function) (old_decl);
5028 DECL_ARTIFICIAL (new_decl) = 1;
5029 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5030 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5032 /* Prepare the data structures for the tree copy. */
5033 memset (&id, 0, sizeof (id));
5035   /* Collect statements that will need folding after copying.  */
5036 id.statements_to_fold = pointer_set_create ();
5038 id.decl_map = pointer_map_create ();
5039 id.debug_map = NULL;
5040 id.src_fn = old_decl;
5041 id.dst_fn = new_decl;
5042 id.src_node = old_version_node;
5043 id.dst_node = new_version_node;
5044 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5045 if (id.src_node->ipa_transforms_to_apply)
5047 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
5048 unsigned int i;
5050 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
5051 id.src_node->ipa_transforms_to_apply);
5052 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
5053 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
5054 VEC_index (ipa_opt_pass,
5055 old_transforms_to_apply,
5056 i));
5059 id.copy_decl = copy_decl_no_change;
5060 id.transform_call_graph_edges
5061 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5062 id.transform_new_cfg = true;
5063 id.transform_return_to_modify = false;
5064 id.transform_lang_insert_block = NULL;
5066 current_function_decl = new_decl;
5067 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5068 (DECL_STRUCT_FUNCTION (old_decl));
5069 initialize_cfun (new_decl, old_decl,
5070 old_entry_block->count);
5071 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5072 = id.src_cfun->gimple_df->ipa_pta;
5073 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
5075 /* Copy the function's static chain. */
5076 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5077 if (p)
5078 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5079 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5080 &id);
5082 /* If there's a tree_map, prepare for substitution. */
5083 if (tree_map)
5084 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
5086 gimple init;
5087 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
5088 if (replace_info->replace_p)
5090 tree op = replace_info->new_tree;
5091 if (!replace_info->old_tree)
5093 int i = replace_info->parm_num;
5094 tree parm;
5095 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5096 i --;
5097 replace_info->old_tree = parm;
5101 STRIP_NOPS (op);
5103 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
5104 op = TREE_OPERAND (op, 0);
5106 if (TREE_CODE (op) == ADDR_EXPR)
5108 op = TREE_OPERAND (op, 0);
5109 while (handled_component_p (op))
5110 op = TREE_OPERAND (op, 0);
5111 if (TREE_CODE (op) == VAR_DECL)
5112 add_referenced_var (op);
5114 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5115 init = setup_one_parameter (&id, replace_info->old_tree,
5116 replace_info->new_tree, id.src_fn,
5117 NULL,
5118 &vars);
5119 if (init)
5120 VEC_safe_push (gimple, heap, init_stmts, init);
5123 /* Copy the function's arguments. */
5124 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5125 DECL_ARGUMENTS (new_decl) =
5126 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5127 args_to_skip, &vars);
5129 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5131 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5133 if (!VEC_empty (tree, DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5134 /* Add local vars. */
5135 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id, false);
5137   /* Copy the function's body.  */
5138 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5139 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
5141 if (DECL_RESULT (old_decl) != NULL_TREE)
5143 tree *res_decl = &DECL_RESULT (old_decl);
5144 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
5145 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5148 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5149 number_blocks (new_decl);
5151 /* We want to create the BB unconditionally, so that the addition of
5152 debug stmts doesn't affect BB count, which may in the end cause
5153 codegen differences. */
5154 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5155 while (VEC_length (gimple, init_stmts))
5156 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
5157 update_clone_info (&id);
5159 /* Remap the nonlocal_goto_save_area, if any. */
5160 if (cfun->nonlocal_goto_save_area)
5162 struct walk_stmt_info wi;
5164 memset (&wi, 0, sizeof (wi));
5165 wi.info = &id;
5166 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5169 /* Clean up. */
5170 pointer_map_destroy (id.decl_map);
5171 if (id.debug_map)
5172 pointer_map_destroy (id.debug_map);
5173 free_dominance_info (CDI_DOMINATORS);
5174 free_dominance_info (CDI_POST_DOMINATORS);
5176 fold_marked_statements (0, id.statements_to_fold);
5177 pointer_set_destroy (id.statements_to_fold);
5178 fold_cond_expr_cond ();
5179 delete_unreachable_blocks_update_callgraph (&id);
5180 if (id.dst_node->analyzed)
5181 cgraph_rebuild_references ();
5182 update_ssa (TODO_update_ssa);
5184 /* After partial cloning we need to rescale frequencies, so they are
5185 within proper range in the cloned function. */
5186 if (new_entry)
5188 struct cgraph_edge *e;
5189 rebuild_frequencies ();
5191 new_version_node->count = ENTRY_BLOCK_PTR->count;
5192 for (e = new_version_node->callees; e; e = e->next_callee)
5194 basic_block bb = gimple_bb (e->call_stmt);
5195 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5196 bb);
5197 e->count = bb->count;
5199 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5201 basic_block bb = gimple_bb (e->call_stmt);
5202 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5203 bb);
5204 e->count = bb->count;
5208 free_dominance_info (CDI_DOMINATORS);
5209 free_dominance_info (CDI_POST_DOMINATORS);
5211 gcc_assert (!id.debug_stmts);
5212 VEC_free (gimple, heap, init_stmts);
5213 pop_cfun ();
5214 current_function_decl = old_current_function_decl;
5215 gcc_assert (!current_function_decl
5216 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
5217 return;
5220 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
5221    the callee and return the inlined body on success.  */
5223 tree
5224 maybe_inline_call_in_expr (tree exp)
5226 tree fn = get_callee_fndecl (exp);
5228 /* We can only try to inline "const" functions. */
5229 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5231 struct pointer_map_t *decl_map = pointer_map_create ();
5232 call_expr_arg_iterator iter;
5233 copy_body_data id;
5234 tree param, arg, t;
5236 /* Remap the parameters. */
5237 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5238 param;
5239 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5240 *pointer_map_insert (decl_map, param) = arg;
5242 memset (&id, 0, sizeof (id));
5243 id.src_fn = fn;
5244 id.dst_fn = current_function_decl;
5245 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5246 id.decl_map = decl_map;
5248 id.copy_decl = copy_decl_no_change;
5249 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5250 id.transform_new_cfg = false;
5251 id.transform_return_to_modify = true;
5252 id.transform_lang_insert_block = false;
5254 /* Make sure not to unshare trees behind the front-end's back
5255 since front-end specific mechanisms may rely on sharing. */
5256 id.regimplify = false;
5257 id.do_not_unshare = true;
5259 /* We're not inside any EH region. */
5260 id.eh_lp_nr = 0;
5262 t = copy_tree_body (&id);
5263 pointer_map_destroy (decl_map);
5265 /* We can only return something suitable for use in a GENERIC
5266 expression tree. */
5267 if (TREE_CODE (t) == MODIFY_EXPR)
5268 return TREE_OPERAND (t, 1);
5271 return NULL_TREE;
5274 /* Duplicate a type, fields and all. */
5276 tree
5277 build_duplicate_type (tree type)
5279 struct copy_body_data id;
5281 memset (&id, 0, sizeof (id));
5282 id.src_fn = current_function_decl;
5283 id.dst_fn = current_function_decl;
5284 id.src_cfun = cfun;
5285 id.decl_map = pointer_map_create ();
5286 id.debug_map = NULL;
5287 id.copy_decl = copy_decl_no_change;
5289 type = remap_type_1 (type, &id);
5291 pointer_map_destroy (id.decl_map);
5292 if (id.debug_map)
5293 pointer_map_destroy (id.debug_map);
5295 TYPE_CANONICAL (type) = type;
5297 return type;
5300 /* Return whether it is safe to inline the call represented by edge E;
5301    it is not when the functions use incompatible target-specific options
5302    or the call-site argument types mismatch the parameter types.  */
5303 bool
5304 tree_can_inline_p (struct cgraph_edge *e)
5306 #if 0
5307 /* This causes a regression in SPEC in that it prevents a cold function from
5308 inlining a hot function. Perhaps this should only apply to functions
5309 that the user declares hot/cold/optimize explicitly. */
5311 /* Don't inline a function with a higher optimization level than the
5312 caller, or with different space constraints (hot/cold functions). */
5313 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
5314 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
5316 if (caller_tree != callee_tree)
5318 struct cl_optimization *caller_opt
5319 = TREE_OPTIMIZATION ((caller_tree)
5320 ? caller_tree
5321 : optimization_default_node);
5323 struct cl_optimization *callee_opt
5324 = TREE_OPTIMIZATION ((callee_tree)
5325 ? callee_tree
5326 : optimization_default_node);
5328 if ((caller_opt->optimize > callee_opt->optimize)
5329 || (caller_opt->optimize_size != callee_opt->optimize_size))
5330 return false;
5332 #endif
5333 tree caller, callee, lhs;
5335 caller = e->caller->decl;
5336 callee = e->callee->decl;
5338 /* First check that inlining isn't simply forbidden in this case. */
5339 if (inline_forbidden_into_p (caller, callee))
5341 e->inline_failed = CIF_UNSPECIFIED;
5342 gimple_call_set_cannot_inline (e->call_stmt, true);
5343 return false;
5346 /* Allow the backend to decide if inlining is ok. */
5347 if (!targetm.target_option.can_inline_p (caller, callee))
5349 e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5350 gimple_call_set_cannot_inline (e->call_stmt, true);
5351 e->call_stmt_cannot_inline_p = true;
5352 return false;
5355   /* Do not inline calls where we cannot trivially work around mismatches
5356      in argument or return types.  */
5357 if (e->call_stmt
5358 && ((DECL_RESULT (callee)
5359 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
5360 && (lhs = gimple_call_lhs (e->call_stmt)) != NULL_TREE
5361 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
5362 TREE_TYPE (lhs))
5363 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
5364 || !gimple_check_call_args (e->call_stmt)))
5366 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5367 gimple_call_set_cannot_inline (e->call_stmt, true);
5368 e->call_stmt_cannot_inline_p = true;
5369 return false;
5372 return true;
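/* For illustration only -- one hedged example of the mismatched-arguments
   case above.  With an unprototyped declaration in the caller,

       extern int use ();
       ...
       use (1.5);        -- but the definition is  int use (int x)

   the double argument cannot be trivially converted to the int parameter,
   gimple_check_call_args fails, and the edge is marked
   CIF_MISMATCHED_ARGUMENTS so that inlining of this call is declined.  */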