/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "hashtab.h"
36 #include "langhooks.h"
37 #include "basic-block.h"
38 #include "tree-iterator.h"
39 #include "cgraph.h"
40 #include "intl.h"
41 #include "tree-mudflap.h"
42 #include "tree-flow.h"
43 #include "function.h"
44 #include "ggc.h"
45 #include "tree-flow.h"
46 #include "diagnostic.h"
47 #include "except.h"
48 #include "debug.h"
49 #include "pointer-set.h"
50 #include "ipa-prop.h"
51 #include "value-prof.h"
52 #include "tree-pass.h"
53 #include "target.h"
54 #include "integrate.h"
56 /* I'm not real happy about this, but we need to handle gimple and
57 non-gimple trees. */
58 #include "gimple.h"
/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than blocks inlined into an existing function as
   with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
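/* A concrete sketch of the Inlining case above (names are schematic,
   not taken from these sources): inlining

     int inc (int x) { return x + 1; }

   into "y = inc (v);" turns the PARM_DECL x into a local VAR_DECL
   initialized from v, and the RETURN_EXPR into an assignment such as

     retval.1 = x + 1;

   after which uses of the call's value read from retval.1.  */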
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
/* Weights that estimate_num_insns uses for heuristics in inlining.  */

eni_weights eni_inlining_weights;

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
136 /* Insert a tree->tree mapping for ID. Despite the name suggests
137 that the trees should be variables, it is used for more than that. */
139 void
140 insert_decl_map (copy_body_data *id, tree key, tree value)
142 *pointer_map_insert (id->decl_map, key) = value;
144 /* Always insert an identity map as well. If we see this same new
145 node again, we won't want to duplicate it a second time. */
146 if (key != value)
147 *pointer_map_insert (id->decl_map, value) = value;
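/* Sketch of the identity-map property above (the decls are
   hypothetical): after

     insert_decl_map (id, old_decl, new_decl);

   a later lookup of new_decl itself succeeds,

     n = (tree *) pointer_map_contains (id->decl_map, new_decl);
     gcc_assert (*n == new_decl);

   so a second walk over already-remapped trees leaves them alone.  */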
150 /* Insert a tree->tree mapping for ID. This is only used for
151 variables. */
153 static void
154 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
156 if (!gimple_in_ssa_p (id->src_cfun))
157 return;
159 if (!MAY_HAVE_DEBUG_STMTS)
160 return;
162 if (!target_for_debug_bind (key))
163 return;
165 gcc_assert (TREE_CODE (key) == PARM_DECL);
166 gcc_assert (TREE_CODE (value) == VAR_DECL);
168 if (!id->debug_map)
169 id->debug_map = pointer_map_create ();
171 *pointer_map_insert (id->debug_map, key) = value;
/* Construct a new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might have substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from the need to introduce a PHI node
     in the case the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
	{
	  /* By inlining a function having an uninitialized variable, we
	     might extend its lifetime (the variable might get reused).
	     This causes an ICE in the case we end up extending the
	     lifetime of an SSA name across an abnormal edge, and it also
	     increases register pressure.

	     We simply initialize all uninitialized vars by 0, except
	     for the case we are inlining to the very first BB.  We can
	     avoid this for all BBs that are not inside strongly connected
	     regions of the CFG, but this is expensive to test.  */
	  if (id->entry_bb
	      && is_gimple_reg (SSA_NAME_VAR (name))
	      && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple init_stmt;

	      init_stmt = gimple_build_assign (new_tree,
					       fold_convert (TREE_TYPE (new_tree),
							     integer_zero_node));
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
		  == name)
		set_default_def (SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
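/* Illustrating the zero-initialization above (schematic GIMPLE, with
   invented names): inlining a callee that reads an uninitialized local

     int i;
     use (i);

   into the middle of a caller gives i's replacement an explicit
   definition in the entry block of the inlined body,

     i.2 = 0;

   so the new SSA name never extends an undefined lifetime across an
   abnormal edge.  */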
/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
int processing_debug_stmt = 0;
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->src_fn;

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      if (cfun && gimple_in_ssa_p (cfun)
	  && (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
	{
	  tree def = gimple_default_def (id->src_cfun, decl);
	  get_var_ann (t);
	  if (TREE_CODE (decl) != PARM_DECL && def)
	    {
	      tree map = remap_ssa_name (def, id);
	      /* Watch out for RESULT_DECLs whose SSA names map directly
		 to them.  */
	      if (TREE_CODE (map) == SSA_NAME
		  && gimple_nop_p (SSA_NAME_DEF_STMT (map)))
		set_default_def (t, map);
	    }
	  add_referenced_var (t);
	}
      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
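/* Sketch of the lookup-then-copy behavior above (decl names are
   hypothetical): the first remap_decl (local_i, id) call invokes
   id->copy_decl and records the pair via insert_decl_map, so every
   later occurrence of local_i in the body being copied resolves to
   the same new decl through decl_map instead of producing another
   fresh copy.  */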
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					      TYPE_MODE (type),
					      TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
						TYPE_MODE (type),
						TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f, nf = NULL;

	for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
	  {
	    t = remap_decl (f, id);
	    DECL_CONTEXT (t) = new_tree;
	    TREE_CHAIN (t) = nf;
	    nf = t;
	  }
	TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
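/* A variably modified type is one whose layout depends on values that
   themselves need remapping.  For example (schematic), for a callee
   parameter n used as

     char buf[n];

   the ARRAY_TYPE's domain refers to n's PARM_DECL, so remap_type must
   build a new array type whose bounds use the remapped copy of n; a
   fixed type like "char[16]" would be returned unchanged.  */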
/* Return the previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}
/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}
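/* For instance, if an inlined callee contains

     static int counter;

   can_be_nonlocal returns true for counter: the single static DECL is
   shared by every inlined copy rather than duplicated, and remap_decls
   below may record it in the nonlocalized list for debug info.  */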
static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;
      tree origin_var = DECL_ORIGIN (old_var);

      if (can_be_nonlocal (old_var, id))
	{
	  if (TREE_CODE (old_var) == VAR_DECL
	      && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
	    cfun->local_decls = tree_cons (NULL_TREE, old_var,
					   cfun->local_decls);
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
	{
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  TREE_CHAIN (new_var) = new_decls;
	  new_decls = new_var;
	}
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  fn = id->dst_fn;

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}
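/* remap_blocks mirrors the lexical BLOCK tree.  Given an original tree
   (schematic)

     BLOCK A
       BLOCK B
       BLOCK C

   the copy reproduces A' with subblocks B' and C' in the same order:
   each recursive copy is prepended, and the blocks_nreverse call above
   restores the original ordering.  */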
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
	copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}
/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}
/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
	 any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */
      if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      tree type, new_tree, old;

	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the
		 INDIRECT_REF, but we absolutely rely on that.  As
		 fold_indirect_ref does other useful transformations,
		 try that first, though.  */
	      type = TREE_TYPE (TREE_TYPE (*n));
	      new_tree = unshare_expr (*n);
	      old = *tp;
	      *tp = gimple_fold_indirect_ref (new_tree);
	      if (!*tp)
		{
		  if (TREE_CODE (new_tree) == ADDR_EXPR)
		    {
		      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
						 type, new_tree);
		      /* ???  We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (new_tree, 0);
		    }
		  else
		    {
		      *tp = build1 (INDIRECT_REF, type, new_tree);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
	 vars, unless they are referenced only from types.  */
      if (gimple_in_ssa_p (cfun)
	  && TREE_CODE (*tp) == VAR_DECL
	  && id->remapping_type_depth == 0
	  && !processing_debug_stmt)
	add_referenced_var (*tp);

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
	gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     the INDIRECT_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  But make sure
	     to not improperly set TREE_BLOCK on some sub-expressions.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  tree block = id->block;
	  id->block = NULL_TREE;
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
	  id->block = block;

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
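/* Example of the *& elimination above (schematic): inlining

     int deref (int *p) { return *p; }

   for the call "deref (&x)" maps p to &x, so the body's *p becomes
   *&x, which gimple_fold_indirect_ref collapses back to plain x.  */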
/* Called from copy_body via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      tree new_tree;
	      tree old;
	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
		 does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (TREE_TYPE (*n));
	      if (id->do_not_unshare)
		new_tree = *n;
	      else
		new_tree = unshare_expr (*n);
	      old = *tp;
	      *tp = gimple_fold_indirect_ref (new_tree);
	      if (! *tp)
		{
		  if (TREE_CODE (new_tree) == ADDR_EXPR)
		    {
		      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
						 type, new_tree);
		      /* ???  We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (new_tree, 0);
		    }
		  else
		    {
		      *tp = build1 (INDIRECT_REF, type, new_tree);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
	 vars, unless they are referenced only from types or debug stmts.  */
      if (gimple_in_ssa_p (cfun)
	  && TREE_CODE (*tp) == VAR_DECL
	  && id->remapping_type_depth == 0
	  && !processing_debug_stmt)
	add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
	 When inlining we want EXPRs without a block to appear in the block
	 of the function call.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->block;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = (tree *) pointer_map_contains (id->decl_map,
						 TREE_BLOCK (*tp));
	      gcc_assert (n);
	      new_block = *n;
	    }
	  TREE_BLOCK (*tp) = new_block;
	}

      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
	TREE_OPERAND (*tp, 0) =
	  build_int_cst (NULL_TREE,
			 id->eh_region_offset
			 + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If RETVAL is just the result decl, the result decl has
	 already been set (e.g. a recent "foo (&result_decl, ...)");
	 just toss the entire GIMPLE_RETURN.  */
      if (retval && TREE_CODE (retval) != RESULT_DECL)
	{
	  copy = gimple_build_assign (id->retvar, retval);
	  /* id->retvar is already substituted.  Skip it on later remapping.  */
	  skip_first = true;
	}
      else
	return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
	 have embedded statements.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  copy = copy_gimple_bind (stmt, id);
	  break;

	case GIMPLE_CATCH:
	  s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
	  copy = gimple_build_catch (gimple_catch_types (stmt), s1);
	  break;

	case GIMPLE_EH_FILTER:
	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
	  break;

	case GIMPLE_TRY:
	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
	  copy = gimple_build_wce (s1);
	  break;

	case GIMPLE_OMP_PARALLEL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_parallel
		   (s1,
		    gimple_omp_parallel_clauses (stmt),
		    gimple_omp_parallel_child_fn (stmt),
		    gimple_omp_parallel_data_arg (stmt));
	  break;

	case GIMPLE_OMP_TASK:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_task
		   (s1,
		    gimple_omp_task_clauses (stmt),
		    gimple_omp_task_child_fn (stmt),
		    gimple_omp_task_data_arg (stmt),
		    gimple_omp_task_copy_fn (stmt),
		    gimple_omp_task_arg_size (stmt),
		    gimple_omp_task_arg_align (stmt));
	  break;

	case GIMPLE_OMP_FOR:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
	  copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
				       gimple_omp_for_collapse (stmt), s2);
	  {
	    size_t i;
	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	      {
		gimple_omp_for_set_index (copy, i,
					  gimple_omp_for_index (stmt, i));
		gimple_omp_for_set_initial (copy, i,
					    gimple_omp_for_initial (stmt, i));
		gimple_omp_for_set_final (copy, i,
					  gimple_omp_for_final (stmt, i));
		gimple_omp_for_set_incr (copy, i,
					 gimple_omp_for_incr (stmt, i));
		gimple_omp_for_set_cond (copy, i,
					 gimple_omp_for_cond (stmt, i));
	      }
	  }
	  break;

	case GIMPLE_OMP_MASTER:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_master (s1);
	  break;

	case GIMPLE_OMP_ORDERED:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_ordered (s1);
	  break;

	case GIMPLE_OMP_SECTION:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_section (s1);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_sections
		   (s1, gimple_omp_sections_clauses (stmt));
	  break;

	case GIMPLE_OMP_SINGLE:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_single
		   (s1, gimple_omp_single_clauses (stmt));
	  break;

	case GIMPLE_OMP_CRITICAL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy
	    = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
	{
	  /* Here we handle statements that are not completely rewritten.
	     First we detect some inlining-induced bogosities for
	     discarding.  */

	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = gimple_assign_lhs (stmt), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		return gimple_build_nop ();
	    }
	}

      if (gimple_debug_bind_p (stmt))
	{
	  copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
					  gimple_debug_bind_get_value (stmt),
					  stmt);
	  VEC_safe_push (gimple, heap, id->debug_stmts, copy);
	  return copy;
	}
      else
	/* Create a new deep copy of the statement.  */
	copy = gimple_copy (stmt);
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  if (gimple_debug_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  /* We have to handle EH region remapping of GIMPLE_RESX specially because
     the region number is not an operand.  */
  if (gimple_code (stmt) == GIMPLE_RESX && id->eh_region_offset)
    {
      gimple_resx_set_region (copy, gimple_resx_region (stmt) + id->eh_region_offset);
    }
  return copy;
}
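/* Schematic example of the VAR = VAR case above: if the caller passed
   the constant 5 for parameter a, the remapped copy of a callee
   statement "a = a;" would amount to "5 = 5;", so remap_gimple_stmt
   drops it and returns a GIMPLE_NOP instead.  */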
/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
	 gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
					 (basic_block) bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     are only of minor importance, for driving canonicalize_loop_headers.  */
  copy_basic_block->frequency = ((gcov_type)bb->frequency
				 * frequency_scale / REG_BR_PROB_BASE);

  if (copy_basic_block->frequency > BB_FREQ_MAX)
    copy_basic_block->frequency = BB_FREQ_MAX;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
	continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
	 non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == NOP_EXPR
	  && !is_gimple_val (gimple_assign_rhs1 (stmt)))
	{
	  tree new_rhs;
	  new_rhs = force_gimple_operand_gsi (&seq_gsi,
					      gimple_assign_rhs1 (stmt),
					      true, NULL, false, GSI_NEW_STMT);
	  gimple_assign_set_rhs1 (stmt, new_rhs);
	  id->regimplify = false;
	}

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
	gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block has been empty at the start of this iteration,
	 call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
	copy_gsi = gsi_start_bb (copy_basic_block);
      else
	gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
	 possibly turned the statement into multiple statements; we
	 need to process all of them.  */
      do
	{
	  tree fn;

	  stmt = gsi_stmt (copy_gsi);
	  if (is_gimple_call (stmt)
	      && gimple_call_va_arg_pack_p (stmt)
	      && id->gimple_call)
	    {
	      /* __builtin_va_arg_pack () should be replaced by
		 all arguments corresponding to ... in the caller.  */
	      tree p;
	      gimple new_call;
	      VEC(tree, heap) *argarray;
	      size_t nargs = gimple_call_num_args (id->gimple_call);
	      size_t n;

	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
		nargs--;

	      /* Create the new array of arguments.  */
	      n = nargs + gimple_call_num_args (stmt);
	      argarray = VEC_alloc (tree, heap, n);
	      VEC_safe_grow (tree, heap, argarray, n);

	      /* Copy all the arguments before '...'  */
	      memcpy (VEC_address (tree, argarray),
		      gimple_call_arg_ptr (stmt, 0),
		      gimple_call_num_args (stmt) * sizeof (tree));

	      /* Append the arguments passed in '...'  */
	      memcpy (VEC_address (tree, argarray) + gimple_call_num_args (stmt),
		      gimple_call_arg_ptr (id->gimple_call, 0)
		      + (gimple_call_num_args (id->gimple_call) - nargs),
		      nargs * sizeof (tree));

	      new_call = gimple_build_call_vec (gimple_call_fn (stmt),
						argarray);

	      VEC_free (tree, heap, argarray);

	      /* Copy all GIMPLE_CALL flags, location and block, except
		 GF_CALL_VA_ARG_PACK.  */
	      gimple_call_copy_flags (new_call, stmt);
	      gimple_call_set_va_arg_pack (new_call, false);
	      gimple_set_location (new_call, gimple_location (stmt));
	      gimple_set_block (new_call, gimple_block (stmt));
	      gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

	      gsi_replace (&copy_gsi, new_call, false);
	      gimple_set_bb (stmt, NULL);
	      stmt = new_call;
	    }
	  else if (is_gimple_call (stmt)
		   && id->gimple_call
		   && (decl = gimple_call_fndecl (stmt))
		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
	    {
	      /* __builtin_va_arg_pack_len () should be replaced by
		 the number of anonymous arguments.  */
	      size_t nargs = gimple_call_num_args (id->gimple_call);
	      tree count, p;
	      gimple new_stmt;

	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
		nargs--;

	      count = build_int_cst (integer_type_node, nargs);
	      new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
	      gsi_replace (&copy_gsi, new_stmt, false);
	      stmt = new_stmt;
	    }

	  /* Statements produced by inlining can be unfolded, especially
	     when we constant propagated some operands.  We can't fold
	     them right now for two reasons:
	     1) folding requires SSA_NAME_DEF_STMTs to be correct
	     2) we can't change function calls to builtins.
	     So we just mark the statement for later folding.  We mark
	     all new statements, instead of just the statements changed
	     by some nontrivial substitution, so that even statements made
	     foldable indirectly are updated.  If this turns out to be
	     expensive, copy_body can be told to watch for nontrivial
	     changes.  */
	  if (id->statements_to_fold)
	    pointer_set_insert (id->statements_to_fold, stmt);

	  /* We're duplicating a CALL_EXPR.  Find any corresponding
	     callgraph edges and update or duplicate them.  */
	  if (is_gimple_call (stmt))
	    {
	      struct cgraph_edge *edge;
	      int flags;

	      switch (id->transform_call_graph_edges)
		{
		case CB_CGE_DUPLICATE:
		  edge = cgraph_edge (id->src_node, orig_stmt);
		  if (edge)
		    edge = cgraph_clone_edge (edge, id->dst_node, stmt,
					      REG_BR_PROB_BASE, 1,
					      edge->frequency, true);
		  break;

		case CB_CGE_MOVE_CLONES:
		  cgraph_set_call_stmt_including_clones (id->dst_node,
							 orig_stmt, stmt);
		  edge = cgraph_edge (id->dst_node, stmt);
		  break;

		case CB_CGE_MOVE:
		  edge = cgraph_edge (id->dst_node, orig_stmt);
		  if (edge)
		    cgraph_set_call_stmt (edge, stmt);
		  break;

		default:
		  gcc_unreachable ();
		}

	      /* Constant propagation on arguments done during inlining
		 may create new direct calls.  Produce an edge for it.  */
	      if ((!edge
		   || (edge->indirect_call
		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
		  && is_gimple_call (stmt)
		  && (fn = gimple_call_fndecl (stmt)) != NULL)
		{
		  struct cgraph_node *dest = cgraph_node (fn);

		  /* We have a missing edge in the callgraph.  This can happen
		     when previous inlining turned an indirect call into a
		     direct call by constant propagating arguments.  In all
		     other cases we hit a bug (incorrect node sharing is the
		     most common reason for missing edges).  */
		  gcc_assert (dest->needed || !dest->analyzed);
		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
		    cgraph_create_edge_including_clones
		      (id->dst_node, dest, stmt, bb->count,
		       compute_call_stmt_bb_frequency (id->dst_node->decl, bb),
		       bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
		  else
		    cgraph_create_edge (id->dst_node, dest, stmt,
					bb->count, CGRAPH_FREQ_BASE,
					bb->loop_depth)->inline_failed
		      = CIF_ORIGINALLY_INDIRECT_CALL;
		  if (dump_file)
		    {
		      fprintf (dump_file, "Created new direct edge to %s",
			       cgraph_node_name (dest));
		    }
		}

	      flags = gimple_call_flags (stmt);
	      if (flags & ECF_MAY_BE_ALLOCA)
		cfun->calls_alloca = true;
	      if (flags & ECF_RETURNS_TWICE)
		cfun->calls_setjmp = true;
	    }

	  /* If you think we can abort here, you are wrong.
	     There is no region 0 in gimple.  */
	  gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) != 0);

	  if (stmt_could_throw_p (stmt)
	      /* When we are cloning for inlining, we are supposed to
		 construct a clone that calls precisely the same functions
		 as the original.  However, IPA optimizers might have proved
		 some function calls non-trapping earlier, which might
		 render some basic blocks dead and thus unreachable.

		 We can't update SSA with unreachable blocks in the CFG, so
		 we prevent that scenario by preserving even the "dead" EH
		 edges until the point they are later removed by the
		 fixup_cfg pass.  */
	      || (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
		  && lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) > 0))
	    {
	      int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);

	      /* Add an entry for the copied tree in the EH hashtable.
		 When cloning or versioning, use the hashtable in
		 cfun, and just copy the EH number.  When inlining, use the
		 hashtable in the caller, and adjust the region number.  */
	      if (region > 0)
		add_stmt_to_eh_region (stmt, region + id->eh_region_offset);

	      /* If this tree doesn't have a region associated with it,
		 and there is a "current region,"
		 then associate this tree with the current region
		 and add edges associated with this region.  */
	      if (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) <= 0
		  && id->eh_region > 0
		  && stmt_could_throw_p (stmt))
		add_stmt_to_eh_region (stmt, id->eh_region);
	    }

	  if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
	    {
	      ssa_op_iter i;
	      tree def;

	      find_new_referenced_vars (gsi_stmt (copy_gsi));
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
		if (TREE_CODE (def) == SSA_NAME)
		  SSA_NAME_DEF_STMT (def) = stmt;
	    }

	  gsi_next (&copy_gsi);
	}
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}
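/* Illustrating the __builtin_va_arg_pack () rewrite above (schematic,
   with invented names): if a callee defined as

     int f (int a, ...) { return g (a, __builtin_va_arg_pack ()); }

   is inlined at the call "f (1, 2, 3)", the copied call to g is
   rebuilt roughly as "g (a.0, 2, 3)", splicing the caller's anonymous
   arguments in place of the builtin.  */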
/* Inserting a Single Entry Multiple Exit region in SSA form into code in
   SSA form is quite easy, since the dominator relationship for the old
   basic blocks does not change.

   There is, however, an exception: inlining might change the dominator
   relation across EH edges from basic blocks within inlined functions to
   landing pads in the function we inline into.

   The function fills in the PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges;
   we might then want to change the way we build the CFG pre-inlining to
   include all the possible edges.  */
static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
				  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
	gimple phi;
	gimple_stmt_iterator si;

	if (!nonlocal_goto)
	  gcc_assert (e->flags & EDGE_EH);

	if (!can_throw)
	  gcc_assert (!(e->flags & EDGE_EH));

	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
	  {
	    edge re;

	    phi = gsi_stmt (si);

	    /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
	    gcc_assert (!e->dest->aux);

	    gcc_assert ((e->flags & EDGE_EH)
			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

	    if (!is_gimple_reg (PHI_RESULT (phi)))
	      {
		mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
		continue;
	      }

	    re = find_edge (ret_bb, e->dest);
	    gcc_assert (re);
	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
	  }
      }
}
/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Edges will be taken care of later.  The aux pointers are
   assumed to point to the copies of each BB.  */

static void
copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  gimple_stmt_iterator si;
  int flags;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
	edge new_edge;

	flags = old_edge->flags;

	/* Return edges do get a FALLTHRU flag when they get inlined.  */
	if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
	    && old_edge->dest->aux != EXIT_BLOCK_PTR)
	  flags |= EDGE_FALLTHRU;
	new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
	new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
	new_edge->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;

  for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
    {
      gimple copy_stmt;
      bool can_throw, nonlocal_goto;

      copy_stmt = gsi_stmt (si);
      if (!is_gimple_debug (copy_stmt))
	{
	  update_stmt (copy_stmt);
	  if (gimple_in_ssa_p (cfun))
	    mark_symbols_for_renaming (copy_stmt);
	}

      /* Do this before the possible split_block.  */
      gsi_next (&si);

      /* If this tree could throw an exception, there are two
	 cases where we need to add abnormal edge(s): the
	 tree wasn't in a region and there is a "current
	 region" in the caller; or the original tree had
	 EH edges.  In both cases split the block after the tree,
	 and add abnormal edge(s) as needed; we need both
	 those from the callee and the caller.
	 We check whether the copy can throw, because the const
	 propagation can change an INDIRECT_REF which throws
	 into a COMPONENT_REF which doesn't.  If the copy
	 can throw, the original could also throw.  */
      can_throw = stmt_can_throw_internal (copy_stmt);
      nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);

      if (can_throw || nonlocal_goto)
	{
	  if (!gsi_end_p (si))
	    /* Note that bb's predecessor edges aren't necessarily
	       right at this point; split_block doesn't care.  */
	    {
	      edge e = split_block (new_bb, copy_stmt);

	      new_bb = e->dest;
	      new_bb->aux = e->src->aux;
	      si = gsi_start_bb (new_bb);
	    }
	}

      if (can_throw)
	make_eh_edges (copy_stmt);

      if (nonlocal_goto)
	make_abnormal_goto_edges (gimple_bb (copy_stmt), true);

      if ((can_throw || nonlocal_goto)
	  && gimple_in_ssa_p (cfun))
	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
					  can_throw, nonlocal_goto);
    }
}
1837 /* Copy the PHIs. All blocks and edges are copied, some blocks
1838 were possibly split and new outgoing EH edges inserted.
1839 BB points to the block of the original function and AUX pointers link
1840 the original and newly copied blocks. */
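/* For instance (a hypothetical sketch, not from a real dump): a callee
   PHI such as

     x_3 = PHI <x_1 (bb2), x_2 (bb4)>

   is recreated in the copied block with a fresh SSA name for the
   result, each argument being remapped through the aux link of the
   corresponding predecessor block. */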
1842 static void
1843 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1845 basic_block const new_bb = (basic_block) bb->aux;
1846 edge_iterator ei;
1847 gimple phi;
1848 gimple_stmt_iterator si;
1850 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1852 tree res, new_res;
1853 gimple new_phi;
1854 edge new_edge;
1856 phi = gsi_stmt (si);
1857 res = PHI_RESULT (phi);
1858 new_res = res;
1859 if (is_gimple_reg (res))
1861 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1862 SSA_NAME_DEF_STMT (new_res)
1863 = new_phi = create_phi_node (new_res, new_bb);
1864 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1866 edge const old_edge
1867 = find_edge ((basic_block) new_edge->src->aux, bb);
1868 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1869 tree new_arg = arg;
1870 tree block = id->block;
1871 id->block = NULL_TREE;
1872 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1873 id->block = block;
1874 gcc_assert (new_arg);
1875 /* With return slot optimization we can end up with
1876 non-gimple (foo *)&this->m, fix that here. */
1877 if (TREE_CODE (new_arg) != SSA_NAME
1878 && TREE_CODE (new_arg) != FUNCTION_DECL
1879 && !is_gimple_val (new_arg))
1881 gimple_seq stmts = NULL;
1882 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1883 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
1885 add_phi_arg (new_phi, new_arg, new_edge,
1886 gimple_phi_arg_location_from_edge (phi, old_edge));
1893 /* Wrapper for remap_decl so it can be used as a callback. */
1895 static tree
1896 remap_decl_1 (tree decl, void *data)
1898 return remap_decl (decl, (copy_body_data *) data);
1901 /* Build struct function and associated data structures for the new clone
1902 NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1904 static void
1905 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
1906 int frequency)
1908 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1909 gcov_type count_scale, frequency_scale;
1911 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1912 count_scale = (REG_BR_PROB_BASE * count
1913 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1914 else
1915 count_scale = 1;
1917 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1918 frequency_scale = (REG_BR_PROB_BASE * frequency
1919 /
1920 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1921 else
1922 frequency_scale = count_scale;
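/* A worked example of the scaling above, with hypothetical numbers:
   if the callee's entry count is 1000 and the clone is created for
   COUNT == 250, then count_scale == REG_BR_PROB_BASE / 4, and each
   copied count N later becomes N * count_scale / REG_BR_PROB_BASE,
   i.e. N / 4. */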
1924 /* Register specific tree functions. */
1925 gimple_register_cfg_hooks ();
1927 /* Get clean struct function. */
1928 push_struct_function (new_fndecl);
1930 /* We will rebuild these, so just sanity check that they are empty. */
1931 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
1932 gcc_assert (cfun->local_decls == NULL);
1933 gcc_assert (cfun->cfg == NULL);
1934 gcc_assert (cfun->decl == new_fndecl);
1936 /* Copy items we preserve during cloning. */
1937 cfun->static_chain_decl = src_cfun->static_chain_decl;
1938 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
1939 cfun->function_end_locus = src_cfun->function_end_locus;
1940 cfun->curr_properties = src_cfun->curr_properties;
1941 cfun->last_verified = src_cfun->last_verified;
1942 if (src_cfun->ipa_transforms_to_apply)
1943 cfun->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
1944 src_cfun->ipa_transforms_to_apply);
1945 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
1946 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
1947 cfun->function_frequency = src_cfun->function_frequency;
1948 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
1949 cfun->stdarg = src_cfun->stdarg;
1950 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
1951 cfun->after_inlining = src_cfun->after_inlining;
1952 cfun->returns_struct = src_cfun->returns_struct;
1953 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
1954 cfun->after_tree_profile = src_cfun->after_tree_profile;
1956 init_empty_tree_cfg ();
1958 ENTRY_BLOCK_PTR->count =
1959 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1960 REG_BR_PROB_BASE);
1961 ENTRY_BLOCK_PTR->frequency =
1962 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1963 frequency_scale / REG_BR_PROB_BASE);
1964 EXIT_BLOCK_PTR->count =
1965 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1966 REG_BR_PROB_BASE);
1967 EXIT_BLOCK_PTR->frequency =
1968 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1969 frequency_scale / REG_BR_PROB_BASE);
1970 if (src_cfun->eh)
1971 init_eh_for_function ();
1973 if (src_cfun->gimple_df)
1975 init_tree_ssa (cfun);
1976 cfun->gimple_df->in_ssa_p = true;
1977 init_ssa_operands ();
1979 pop_cfun ();
1982 /* Make a copy of the body of FN so that it can be inserted inline in
1983 another function. Walks FN via CFG, returns new fndecl. */
1985 static tree
1986 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
1987 basic_block entry_block_map, basic_block exit_block_map)
1989 tree callee_fndecl = id->src_fn;
1990 /* Original cfun for the callee, doesn't change. */
1991 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1992 struct function *cfun_to_copy;
1993 basic_block bb;
1994 tree new_fndecl = NULL;
1995 gcov_type count_scale, frequency_scale;
1996 int last;
1998 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1999 count_scale = (REG_BR_PROB_BASE * count
2000 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2001 else
2002 count_scale = 1;
2004 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
2005 frequency_scale = (REG_BR_PROB_BASE * frequency
2006 /
2007 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
2008 else
2009 frequency_scale = count_scale;
2011 /* Register specific tree functions. */
2012 gimple_register_cfg_hooks ();
2014 /* Must have a CFG here at this point. */
2015 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2016 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2018 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2020 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2021 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2022 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2023 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2025 /* Duplicate any exception-handling regions. */
2026 if (cfun->eh)
2028 id->eh_region_offset
2029 = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
2030 0, id->eh_region);
2033 /* Use aux pointers to map the original blocks to their copies. */
2034 FOR_EACH_BB_FN (bb, cfun_to_copy)
2036 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2037 bb->aux = new_bb;
2038 new_bb->aux = bb;
2041 last = last_basic_block;
2043 /* Now that we've duplicated the blocks, duplicate their edges. */
2044 FOR_ALL_BB_FN (bb, cfun_to_copy)
2045 copy_edges_for_bb (bb, count_scale, exit_block_map);
2047 if (gimple_in_ssa_p (cfun))
2048 FOR_ALL_BB_FN (bb, cfun_to_copy)
2049 copy_phis_for_bb (bb, id);
2051 FOR_ALL_BB_FN (bb, cfun_to_copy)
2053 ((basic_block)bb->aux)->aux = NULL;
2054 bb->aux = NULL;
2057 /* Zero out AUX fields of newly created blocks during EH edge
2058 insertion. */
2059 for (; last < last_basic_block; last++)
2060 BASIC_BLOCK (last)->aux = NULL;
2061 entry_block_map->aux = NULL;
2062 exit_block_map->aux = NULL;
2064 return new_fndecl;
2067 /* Copy the debug STMT using ID. We deal with these statements in a
2068 special way: if any variable in their VALUE expression wasn't
2069 remapped yet, we won't remap it, because that would get decl uids
2070 out of sync, causing codegen differences between -g and -g0. If
2071 this arises, we drop the VALUE expression altogether. */
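/* A hypothetical sketch of the failure mode this avoids: for a bind
   such as

     # DEBUG x => y_1 + 1

   where y_1's variable was never remapped while copying the body,
   remapping it now would allocate a fresh decl uid that exists only
   when compiling with -g, so the VALUE is dropped instead and the
   bind merely records that x has no known value here. */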
2073 static void
2074 copy_debug_stmt (gimple stmt, copy_body_data *id)
2076 tree t, *n;
2077 struct walk_stmt_info wi;
2079 t = id->block;
2080 if (gimple_block (stmt))
2082 tree *n;
2083 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2084 if (n)
2085 t = *n;
2087 gimple_set_block (stmt, t);
2089 /* Remap all the operands in STMT. */
2090 memset (&wi, 0, sizeof (wi));
2091 wi.info = id;
2093 processing_debug_stmt = 1;
2095 t = gimple_debug_bind_get_var (stmt);
2097 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2098 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2100 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2101 t = *n;
2103 else
2104 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2106 gimple_debug_bind_set_var (stmt, t);
2108 if (gimple_debug_bind_has_value_p (stmt))
2109 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2110 remap_gimple_op_r, &wi, NULL);
2112 /* Punt if any decl couldn't be remapped. */
2113 if (processing_debug_stmt < 0)
2114 gimple_debug_bind_reset_value (stmt);
2116 processing_debug_stmt = 0;
2118 update_stmt (stmt);
2119 if (gimple_in_ssa_p (cfun))
2120 mark_symbols_for_renaming (stmt);
2123 /* Process deferred debug stmts. In order to give values better odds
2124 of being successfully remapped, we delay the processing of debug
2125 stmts until all other stmts that might require remapping are
2126 processed. */
2128 static void
2129 copy_debug_stmts (copy_body_data *id)
2131 size_t i;
2132 gimple stmt;
2134 if (!id->debug_stmts)
2135 return;
2137 for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
2138 copy_debug_stmt (stmt, id);
2140 VEC_free (gimple, heap, id->debug_stmts);
2143 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2144 another function. */
2146 static tree
2147 copy_tree_body (copy_body_data *id)
2149 tree fndecl = id->src_fn;
2150 tree body = DECL_SAVED_TREE (fndecl);
2152 walk_tree (&body, copy_tree_body_r, id, NULL);
2154 return body;
2157 /* Make a copy of the body of FN so that it can be inserted inline in
2158 another function. */
2160 static tree
2161 copy_body (copy_body_data *id, gcov_type count, int frequency,
2162 basic_block entry_block_map, basic_block exit_block_map)
2164 tree fndecl = id->src_fn;
2165 tree body;
2167 /* If this body has a CFG, walk CFG and copy. */
2168 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2169 body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);
2170 copy_debug_stmts (id);
2172 return body;
2175 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2176 defined in function FN, or of a data member thereof. */
2178 static bool
2179 self_inlining_addr_expr (tree value, tree fn)
2181 tree var;
2183 if (TREE_CODE (value) != ADDR_EXPR)
2184 return false;
2186 var = get_base_address (TREE_OPERAND (value, 0));
2188 return var && auto_var_in_fn_p (var, fn);
2191 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2192 lexical block and line number information from BASE_STMT, if given,
2193 or from the last stmt of the block otherwise. */
2195 static gimple
2196 insert_init_debug_bind (copy_body_data *id,
2197 basic_block bb, tree var, tree value,
2198 gimple base_stmt)
2200 gimple note;
2201 gimple_stmt_iterator gsi;
2202 tree tracked_var;
2204 if (!gimple_in_ssa_p (id->src_cfun))
2205 return NULL;
2207 if (!MAY_HAVE_DEBUG_STMTS)
2208 return NULL;
2210 tracked_var = target_for_debug_bind (var);
2211 if (!tracked_var)
2212 return NULL;
2214 if (bb)
2216 gsi = gsi_last_bb (bb);
2217 if (!base_stmt && !gsi_end_p (gsi))
2218 base_stmt = gsi_stmt (gsi);
2221 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2223 if (bb)
2225 if (!gsi_end_p (gsi))
2226 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2227 else
2228 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2231 return note;
2234 static void
2235 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2237 /* If VAR represents a zero-sized variable, it's possible that the
2238 assignment statement may result in no gimple statements. */
2239 if (init_stmt)
2241 gimple_stmt_iterator si = gsi_last_bb (bb);
2243 /* We can end up with init statements that store to a non-register
2244 from a rhs with a conversion. Handle that here by forcing the
2245 rhs into a temporary. gimple_regimplify_operands is not
2246 prepared to do this for us. */
2247 if (!is_gimple_debug (init_stmt)
2248 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2249 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2250 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2252 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2253 gimple_expr_type (init_stmt),
2254 gimple_assign_rhs1 (init_stmt));
2255 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2256 GSI_NEW_STMT);
2257 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2258 gimple_assign_set_rhs1 (init_stmt, rhs);
2260 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2261 gimple_regimplify_operands (init_stmt, &si);
2262 mark_symbols_for_renaming (init_stmt);
2264 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2266 tree var, def = gimple_assign_lhs (init_stmt);
2268 if (TREE_CODE (def) == SSA_NAME)
2269 var = SSA_NAME_VAR (def);
2270 else
2271 var = def;
2273 insert_init_debug_bind (id, bb, var, def, init_stmt);
2278 /* Initialize parameter P with VALUE. If needed, produce an init statement
2279 at the end of BB. When BB is NULL, we return the init statement to be
2280 output later. */
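/* For example (hypothetical): inlining "int f (int p)" at the call
   "f (5)" maps P to an equivalent local VAR_DECL, say p.42, and when
   setup is needed appends

     p.42 = 5;

   to BB.  When the argument needs no setup -- e.g. it is already an
   SSA name or an invariant and P has a default definition -- the
   routine instead maps P (or its default definition) straight to
   the argument and emits no statement. */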
2281 static gimple
2282 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2283 basic_block bb, tree *vars)
2285 gimple init_stmt = NULL;
2286 tree var;
2287 tree rhs = value;
2288 tree def = (gimple_in_ssa_p (cfun)
2289 ? gimple_default_def (id->src_cfun, p) : NULL);
2291 if (value
2292 && value != error_mark_node
2293 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2295 if (fold_convertible_p (TREE_TYPE (p), value))
2296 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2297 else
2298 /* ??? For valid (GIMPLE) programs we should not end up here.
2299 Still if something has gone wrong and we end up with truly
2300 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2301 to not leak invalid GIMPLE to the following passes. */
2302 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2305 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2306 here since the type of this decl must be visible to the calling
2307 function. */
2308 var = copy_decl_to_var (p, id);
2310 /* We're actually using the newly-created var. */
2311 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2313 get_var_ann (var);
2314 add_referenced_var (var);
2317 /* Declare this new variable. */
2318 TREE_CHAIN (var) = *vars;
2319 *vars = var;
2321 /* Make gimplifier happy about this variable. */
2322 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2324 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2325 we would not need to create a new variable here at all, if it
2326 weren't for debug info. Even so, we can just use the argument
2327 value directly. */
2328 if (TREE_READONLY (p)
2329 && !TREE_ADDRESSABLE (p)
2330 && value && !TREE_SIDE_EFFECTS (value)
2331 && !def)
2333 /* We may produce non-gimple trees by adding NOPs or introduce
2334 invalid sharing when the operand is not really constant.
2335 It is not a big deal to prohibit constant propagation here as
2336 we will constant propagate in the DOM1 pass anyway. */
2337 if (is_gimple_min_invariant (value)
2338 && useless_type_conversion_p (TREE_TYPE (p),
2339 TREE_TYPE (value))
2340 /* We have to be very careful about ADDR_EXPR. Make sure
2341 the base variable isn't a local variable of the inlined
2342 function, e.g., when doing recursive inlining, direct or
2343 mutually-recursive or whatever, which is why we don't
2344 just test whether fn == current_function_decl. */
2345 && ! self_inlining_addr_expr (value, fn))
2347 insert_decl_map (id, p, value);
2348 insert_debug_decl_map (id, p, var);
2349 return insert_init_debug_bind (id, bb, var, value, NULL);
2353 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2354 that way, when the PARM_DECL is encountered, it will be
2355 automatically replaced by the VAR_DECL. */
2356 insert_decl_map (id, p, var);
2358 /* Even if P was TREE_READONLY, the new VAR should not be.
2359 In the original code, we would have constructed a
2360 temporary, and then the function body would have never
2361 changed the value of P. However, now, we will be
2362 constructing VAR directly. The constructor body may
2363 change its value multiple times as it is being
2364 constructed. Therefore, it must not be TREE_READONLY;
2365 the back-end assumes that a TREE_READONLY variable is
2366 assigned to only once. */
2367 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2368 TREE_READONLY (var) = 0;
2370 /* If there is no setup required and we are in SSA, take the easy route
2371 replacing all SSA names representing the function parameter by the
2372 SSA name passed to the function.
2374 We need to construct a map for the variable anyway, as it might be
2375 used in different SSA names when the parameter is set in the function.
2377 Do the replacement at -O0 as well for const arguments replaced by a
2378 constant; this is important for builtin_constant_p and other constructs
2379 requiring a constant argument to be visible in the inlined function body. */
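/* E.g. (hypothetical): inlining "f (12)" where P is a const int
   parameter maps the default definition of P straight to the
   constant 12, so a use of __builtin_constant_p (p) in the inlined
   body can fold to 1 even at -O0. */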
2380 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2381 && (optimize
2382 || (TREE_READONLY (p)
2383 && is_gimple_min_invariant (rhs)))
2384 && (TREE_CODE (rhs) == SSA_NAME
2385 || is_gimple_min_invariant (rhs))
2386 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2388 insert_decl_map (id, def, rhs);
2389 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2392 /* If the value of the argument is never used, don't bother initializing
2393 it. */
2394 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2396 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2397 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2400 /* Initialize this VAR_DECL from the equivalent argument. Convert
2401 the argument to the proper type in case it was promoted. */
2402 if (value)
2404 if (rhs == error_mark_node)
2406 insert_decl_map (id, p, var);
2407 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2410 STRIP_USELESS_TYPE_CONVERSION (rhs);
2412 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2413 keep our trees in gimple form. */
2414 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2416 def = remap_ssa_name (def, id);
2417 init_stmt = gimple_build_assign (def, rhs);
2418 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2419 set_default_def (var, NULL);
2421 else
2422 init_stmt = gimple_build_assign (var, rhs);
2424 if (bb && init_stmt)
2425 insert_init_stmt (id, bb, init_stmt);
2427 return init_stmt;
2430 /* Generate code to initialize the parameters of the function at the
2431 top of the stack in ID from the GIMPLE_CALL STMT. */
2433 static void
2434 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2435 tree fn, basic_block bb)
2437 tree parms;
2438 size_t i;
2439 tree p;
2440 tree vars = NULL_TREE;
2441 tree static_chain = gimple_call_chain (stmt);
2443 /* Figure out what the parameters are. */
2444 parms = DECL_ARGUMENTS (fn);
2446 /* Loop through the parameter declarations, replacing each with an
2447 equivalent VAR_DECL, appropriately initialized. */
2448 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2450 tree val;
2451 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2452 setup_one_parameter (id, p, val, fn, bb, &vars);
2455 /* Initialize the static chain. */
2456 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2457 gcc_assert (fn != current_function_decl);
2458 if (p)
2460 /* No static chain? Seems like a bug in tree-nested.c. */
2461 gcc_assert (static_chain);
2463 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2466 declare_inline_vars (id->block, vars);
2470 /* Declare a return variable to replace the RESULT_DECL for the
2471 function we are calling. An appropriate VAR_DECL is returned;
2472 *USE_P is filled in to contain a use of the declaration to
2473 indicate the return value of the function.
2475 RETURN_SLOT, if non-null, is the place where to store the result. It
2476 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2477 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2479 The return value is a (possibly null) value that is the result of the
2480 function as seen by the callee. *USE_P is a (possibly null) value that
2481 holds the result as seen by the caller. */
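/* A hypothetical sketch of the common case: for a call "a = foo ()",
   MODIFY_DEST is "a"; when "a" is safe to reuse (no type promotion,
   not address-taken, not a global), the RESULT_DECL is mapped
   directly to "a" and *USE_P is set to NULL, so copied return
   statements store straight into "a" without an extra temporary. */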
2483 static tree
2484 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2485 tree *use_p)
2487 tree callee = id->src_fn;
2488 tree caller = id->dst_fn;
2489 tree result = DECL_RESULT (callee);
2490 tree callee_type = TREE_TYPE (result);
2491 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
2492 tree var, use;
2494 /* We don't need to do anything for functions that don't return
2495 anything. */
2496 if (!result || VOID_TYPE_P (callee_type))
2498 *use_p = NULL_TREE;
2499 return NULL_TREE;
2502 /* If there was a return slot, then the return value is the
2503 dereferenced address of that object. */
2504 if (return_slot)
2506 /* The front end shouldn't have used both return_slot and
2507 a modify expression. */
2508 gcc_assert (!modify_dest);
2509 if (DECL_BY_REFERENCE (result))
2511 tree return_slot_addr = build_fold_addr_expr (return_slot);
2512 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2514 /* We are going to construct *&return_slot and we can't do that
2515 for variables that are not believed to be addressable.
2517 FIXME: This check can possibly trigger, because values returned
2518 via the return slot optimization are not believed to have their
2519 address taken by alias analysis. */
2520 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2521 if (gimple_in_ssa_p (cfun))
2523 HOST_WIDE_INT bitsize;
2524 HOST_WIDE_INT bitpos;
2525 tree offset;
2526 enum machine_mode mode;
2527 int unsignedp;
2528 int volatilep;
2529 tree base;
2530 base = get_inner_reference (return_slot, &bitsize, &bitpos,
2531 &offset,
2532 &mode, &unsignedp, &volatilep,
2533 false);
2534 if (TREE_CODE (base) == INDIRECT_REF)
2535 base = TREE_OPERAND (base, 0);
2536 if (TREE_CODE (base) == SSA_NAME)
2537 base = SSA_NAME_VAR (base);
2538 mark_sym_for_renaming (base);
2540 var = return_slot_addr;
2542 else
2544 var = return_slot;
2545 gcc_assert (TREE_CODE (var) != SSA_NAME);
2546 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2548 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2549 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2550 && !DECL_GIMPLE_REG_P (result)
2551 && DECL_P (var))
2552 DECL_GIMPLE_REG_P (var) = 0;
2553 use = NULL;
2554 goto done;
2557 /* All types requiring non-trivial constructors should have been handled. */
2558 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2560 /* Attempt to avoid creating a new temporary variable. */
2561 if (modify_dest
2562 && TREE_CODE (modify_dest) != SSA_NAME)
2564 bool use_it = false;
2566 /* We can't use MODIFY_DEST if there's type promotion involved. */
2567 if (!useless_type_conversion_p (callee_type, caller_type))
2568 use_it = false;
2570 /* ??? If we're assigning to a variable sized type, then we must
2571 reuse the destination variable, because we've no good way to
2572 create variable sized temporaries at this point. */
2573 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2574 use_it = true;
2576 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2577 reuse it as the result of the call directly. Don't do this if
2578 it would promote MODIFY_DEST to addressable. */
2579 else if (TREE_ADDRESSABLE (result))
2580 use_it = false;
2581 else
2583 tree base_m = get_base_address (modify_dest);
2585 /* If the base isn't a decl, then it's a pointer, and we don't
2586 know where that's going to go. */
2587 if (!DECL_P (base_m))
2588 use_it = false;
2589 else if (is_global_var (base_m))
2590 use_it = false;
2591 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2592 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2593 && !DECL_GIMPLE_REG_P (result)
2594 && DECL_GIMPLE_REG_P (base_m))
2595 use_it = false;
2596 else if (!TREE_ADDRESSABLE (base_m))
2597 use_it = true;
2600 if (use_it)
2602 var = modify_dest;
2603 use = NULL;
2604 goto done;
2608 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2610 var = copy_result_decl_to_var (result, id);
2611 if (gimple_in_ssa_p (cfun))
2613 get_var_ann (var);
2614 add_referenced_var (var);
2617 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2618 DECL_STRUCT_FUNCTION (caller)->local_decls
2619 = tree_cons (NULL_TREE, var,
2620 DECL_STRUCT_FUNCTION (caller)->local_decls);
2622 /* Do not have the rest of GCC warn about this variable as it should
2623 not be visible to the user. */
2624 TREE_NO_WARNING (var) = 1;
2626 declare_inline_vars (id->block, var);
2628 /* Build the use expr. If the return type of the function was
2629 promoted, convert it back to the expected type. */
2630 use = var;
2631 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2632 use = fold_convert (caller_type, var);
2634 STRIP_USELESS_TYPE_CONVERSION (use);
2636 if (DECL_BY_REFERENCE (result))
2638 TREE_ADDRESSABLE (var) = 1;
2639 var = build_fold_addr_expr (var);
2642 done:
2643 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2644 way, when the RESULT_DECL is encountered, it will be
2645 automatically replaced by the VAR_DECL. */
2646 insert_decl_map (id, result, var);
2648 /* Remember this so we can ignore it in remap_decls. */
2649 id->retvar = var;
2651 *use_p = use;
2652 return var;
2655 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2656 to a local label. */
2658 static tree
2659 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2661 tree node = *nodep;
2662 tree fn = (tree) fnp;
2664 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2665 return node;
2667 if (TYPE_P (node))
2668 *walk_subtrees = 0;
2670 return NULL_TREE;
2673 /* Callback through walk_tree. Determine if we've got an aggregate
2674 type that we can't support; return non-null if so. */
2676 static tree
2677 cannot_copy_type_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
2678 void *data ATTRIBUTE_UNUSED)
2680 tree t, node = *nodep;
2682 if (TREE_CODE (node) == RECORD_TYPE || TREE_CODE (node) == UNION_TYPE)
2684 /* We cannot inline a function of the form
2686 void F (int i) { struct S { int ar[i]; } s; }
2688 Attempting to do so produces a catch-22.
2689 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
2690 UNION_TYPE nodes, then it goes into infinite recursion on a
2691 structure containing a pointer to its own type. If it doesn't,
2692 then the type node for S doesn't get adjusted properly when
2693 F is inlined.
2695 ??? This is likely no longer true, but it's too late in the 4.0
2696 cycle to try to find out. This should be checked for 4.1. */
2697 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
2698 if (variably_modified_type_p (TREE_TYPE (t), NULL))
2699 return node;
2702 return NULL_TREE;
2706 /* Determine if the function can be copied. If so return NULL. If
2707 not return a string describing the reason for failure. */
2709 static const char *
2710 copy_forbidden (struct function *fun, tree fndecl)
2712 const char *reason = fun->cannot_be_copied_reason;
2713 tree step;
2715 /* Only examine the function once. */
2716 if (fun->cannot_be_copied_set)
2717 return reason;
2719 /* We cannot copy a function that receives a non-local goto
2720 because we cannot remap the destination label used in the
2721 function that is performing the non-local goto. */
2722 /* ??? Actually, this should be possible, if we work at it.
2723 No doubt there's just a handful of places that simply
2724 assume it doesn't happen and don't substitute properly. */
2725 if (fun->has_nonlocal_label)
2727 reason = G_("function %q+F can never be copied "
2728 "because it receives a non-local goto");
2729 goto fail;
2732 for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2734 tree decl = TREE_VALUE (step);
2736 if (TREE_CODE (decl) == VAR_DECL
2737 && TREE_STATIC (decl)
2738 && !DECL_EXTERNAL (decl)
2739 && DECL_INITIAL (decl)
2740 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2741 has_label_address_in_static_1,
2742 fndecl))
2744 reason = G_("function %q+F can never be copied because it saves "
2745 "address of local label in a static variable");
2746 goto fail;
2749 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2750 && variably_modified_type_p (TREE_TYPE (decl), NULL)
2751 && walk_tree_without_duplicates (&TREE_TYPE (decl),
2752 cannot_copy_type_1, NULL))
2754 reason = G_("function %q+F can never be copied "
2755 "because it uses variable sized variables");
2756 goto fail;
2760 fail:
2761 fun->cannot_be_copied_reason = reason;
2762 fun->cannot_be_copied_set = true;
2763 return reason;
2767 static const char *inline_forbidden_reason;
2769 /* A callback for walk_gimple_seq to handle statements. Returns non-null
2770 iff a function cannot be inlined. Also sets the reason why. */
2772 static tree
2773 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2774 struct walk_stmt_info *wip)
2776 tree fn = (tree) wip->info;
2777 tree t;
2778 gimple stmt = gsi_stmt (*gsi);
2780 switch (gimple_code (stmt))
2782 case GIMPLE_CALL:
2783 /* Refuse to inline an alloca call unless the user explicitly forced it, as
2784 this may change the program's memory overhead drastically when the
2785 function using alloca is called in a loop. In the GCC present in
2786 SPEC2000, inlining into schedule_block caused it to require 2GB of
2787 RAM instead of 256MB. */
2788 if (gimple_alloca_call_p (stmt)
2789 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2791 inline_forbidden_reason
2792 = G_("function %q+F can never be inlined because it uses "
2793 "alloca (override using the always_inline attribute)");
2794 *handled_ops_p = true;
2795 return fn;
2798 t = gimple_call_fndecl (stmt);
2799 if (t == NULL_TREE)
2800 break;
2802 /* We cannot inline functions that call setjmp. */
2803 if (setjmp_call_p (t))
2805 inline_forbidden_reason
2806 = G_("function %q+F can never be inlined because it uses setjmp");
2807 *handled_ops_p = true;
2808 return t;
2811 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2812 switch (DECL_FUNCTION_CODE (t))
2814 /* We cannot inline functions that take a variable number of
2815 arguments. */
2816 case BUILT_IN_VA_START:
2817 case BUILT_IN_NEXT_ARG:
2818 case BUILT_IN_VA_END:
2819 inline_forbidden_reason
2820 = G_("function %q+F can never be inlined because it "
2821 "uses variable argument lists");
2822 *handled_ops_p = true;
2823 return t;
2825 case BUILT_IN_LONGJMP:
2826 /* We can't inline functions that call __builtin_longjmp at
2827 all. The non-local goto machinery really requires the
2828 destination be in a different function. If we allow the
2829 function calling __builtin_longjmp to be inlined into the
2830 function calling __builtin_setjmp, Things will Go Awry. */
2831 inline_forbidden_reason
2832 = G_("function %q+F can never be inlined because "
2833 "it uses setjmp-longjmp exception handling");
2834 *handled_ops_p = true;
2835 return t;
2837 case BUILT_IN_NONLOCAL_GOTO:
2838 /* Similarly. */
2839 inline_forbidden_reason
2840 = G_("function %q+F can never be inlined because "
2841 "it uses non-local goto");
2842 *handled_ops_p = true;
2843 return t;
2845 case BUILT_IN_RETURN:
2846 case BUILT_IN_APPLY_ARGS:
2847 /* If a __builtin_apply_args caller were inlined,
2848 it would be saving arguments of the function it has
2849 been inlined into. Similarly __builtin_return would
2850 return from the function the call has been inlined into. */
2851 inline_forbidden_reason
2852 = G_("function %q+F can never be inlined because "
2853 "it uses __builtin_return or __builtin_apply_args");
2854 *handled_ops_p = true;
2855 return t;
2857 default:
2858 break;
2860 break;
2862 case GIMPLE_GOTO:
2863 t = gimple_goto_dest (stmt);
2865 /* We will not inline a function which uses computed goto. The
2866 addresses of its local labels, which may be tucked into
2867 global storage, are of course not constant across
2868 instantiations, which causes unexpected behavior. */
2869 if (TREE_CODE (t) != LABEL_DECL)
2871 inline_forbidden_reason
2872 = G_("function %q+F can never be inlined "
2873 "because it contains a computed goto");
2874 *handled_ops_p = true;
2875 return t;
2877 break;
2879 default:
2880 break;
2883 *handled_ops_p = false;
2884 return NULL_TREE;
2887 /* Return true if FNDECL is a function that cannot be inlined into
2888 another one. */
2890 static bool
2891 inline_forbidden_p (tree fndecl)
2893 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
2894 struct walk_stmt_info wi;
2895 struct pointer_set_t *visited_nodes;
2896 basic_block bb;
2897 bool forbidden_p = false;
2899 /* First check for shared reasons not to copy the code. */
2900 inline_forbidden_reason = copy_forbidden (fun, fndecl);
2901 if (inline_forbidden_reason != NULL)
2902 return true;
2904 /* Next, walk the statements of the function looking for
2905 constructs we can't handle, or that are non-optimal for inlining. */
2906 visited_nodes = pointer_set_create ();
2907 memset (&wi, 0, sizeof (wi));
2908 wi.info = (void *) fndecl;
2909 wi.pset = visited_nodes;
2911 FOR_EACH_BB_FN (bb, fun)
2913 gimple ret;
2914 gimple_seq seq = bb_seq (bb);
2915 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
2916 forbidden_p = (ret != NULL);
2917 if (forbidden_p)
2918 break;
2921 pointer_set_destroy (visited_nodes);
2922 return forbidden_p;
2925 /* Returns nonzero if FN is a function that does not have any
2926 fundamental inline blocking properties. */
2928 bool
2929 tree_inlinable_function_p (tree fn)
2931 bool inlinable = true;
2932 bool do_warning;
2933 tree always_inline;
2935 /* If we've already decided this function shouldn't be inlined,
2936 there's no need to check again. */
2937 if (DECL_UNINLINABLE (fn))
2938 return false;
2940 /* We only warn for functions declared `inline' by the user. */
2941 do_warning = (warn_inline
2942 && DECL_DECLARED_INLINE_P (fn)
2943 && !DECL_NO_INLINE_WARNING_P (fn)
2944 && !DECL_IN_SYSTEM_HEADER (fn));
2946 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
2948 if (flag_no_inline
2949 && always_inline == NULL)
2951 if (do_warning)
2952 warning (OPT_Winline, "function %q+F can never be inlined because it "
2953 "is suppressed using -fno-inline", fn);
2954 inlinable = false;
2957 /* Don't auto-inline anything that might not be bound within
2958 this unit of translation. */
2959 else if (!DECL_DECLARED_INLINE_P (fn)
2960 && DECL_REPLACEABLE_P (fn))
2961 inlinable = false;
2963 else if (!function_attribute_inlinable_p (fn))
2965 if (do_warning)
2966 warning (OPT_Winline, "function %q+F can never be inlined because it "
2967 "uses attributes conflicting with inlining", fn);
2968 inlinable = false;
2971 else if (inline_forbidden_p (fn))
2973 /* See if we should warn about uninlinable functions. Previously,
2974 some of these warnings would be issued while trying to expand
2975 the function inline, but that would cause multiple warnings
2976 about functions that would for example call alloca. But since
2977 this is a property of the function, just one warning is enough.
2978 As a bonus we can now give more details about the reason why a
2979 function is not inlinable. */
2980 if (always_inline)
2981 sorry (inline_forbidden_reason, fn);
2982 else if (do_warning)
2983 warning (OPT_Winline, inline_forbidden_reason, fn);
2985 inlinable = false;
2988 /* Squirrel away the result so that we don't have to check again. */
2989 DECL_UNINLINABLE (fn) = !inlinable;
2991 return inlinable;
2994 /* Estimate the cost of a memory move. Use machine dependent
2995 word size and take possible memcpy call into account. */
2997 static int
2998 estimate_move_cost (tree type)
3000 HOST_WIDE_INT size;
3002 gcc_assert (!VOID_TYPE_P (type));
3004 size = int_size_in_bytes (type);
3006 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3007 /* Cost of a memcpy call, 3 arguments and the call. */
3008 return 4;
3009 else
3010 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
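/* A worked example with hypothetical target parameters: with
   MOVE_MAX_PIECES == 8, a 20-byte structure within the MOVE_RATIO
   limit costs (20 + 8 - 1) / 8 == 3 word moves, whereas a structure
   exceeding the limit is costed as a memcpy call, i.e. 4. */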
3013 /* Returns the cost of operation CODE, according to WEIGHTS. */
3015 static int
3016 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3017 tree op1 ATTRIBUTE_UNUSED, tree op2)
3019 switch (code)
3021 /* These are "free" conversions, or their presumed cost
3022 is folded into other operations. */
3023 case RANGE_EXPR:
3024 CASE_CONVERT:
3025 case COMPLEX_EXPR:
3026 case PAREN_EXPR:
3027 return 0;
3029 /* Assign cost of 1 to usual operations.
3030 ??? We may consider mapping RTL costs to this. */
3031 case COND_EXPR:
3032 case VEC_COND_EXPR:
3034 case PLUS_EXPR:
3035 case POINTER_PLUS_EXPR:
3036 case MINUS_EXPR:
3037 case MULT_EXPR:
3039 case FIXED_CONVERT_EXPR:
3040 case FIX_TRUNC_EXPR:
3042 case NEGATE_EXPR:
3043 case FLOAT_EXPR:
3044 case MIN_EXPR:
3045 case MAX_EXPR:
3046 case ABS_EXPR:
3048 case LSHIFT_EXPR:
3049 case RSHIFT_EXPR:
3050 case LROTATE_EXPR:
3051 case RROTATE_EXPR:
3052 case VEC_LSHIFT_EXPR:
3053 case VEC_RSHIFT_EXPR:
3055 case BIT_IOR_EXPR:
3056 case BIT_XOR_EXPR:
3057 case BIT_AND_EXPR:
3058 case BIT_NOT_EXPR:
3060 case TRUTH_ANDIF_EXPR:
3061 case TRUTH_ORIF_EXPR:
3062 case TRUTH_AND_EXPR:
3063 case TRUTH_OR_EXPR:
3064 case TRUTH_XOR_EXPR:
3065 case TRUTH_NOT_EXPR:
3067 case LT_EXPR:
3068 case LE_EXPR:
3069 case GT_EXPR:
3070 case GE_EXPR:
3071 case EQ_EXPR:
3072 case NE_EXPR:
3073 case ORDERED_EXPR:
3074 case UNORDERED_EXPR:
3076 case UNLT_EXPR:
3077 case UNLE_EXPR:
3078 case UNGT_EXPR:
3079 case UNGE_EXPR:
3080 case UNEQ_EXPR:
3081 case LTGT_EXPR:
3083 case CONJ_EXPR:
3085 case PREDECREMENT_EXPR:
3086 case PREINCREMENT_EXPR:
3087 case POSTDECREMENT_EXPR:
3088 case POSTINCREMENT_EXPR:
3090 case REALIGN_LOAD_EXPR:
3092 case REDUC_MAX_EXPR:
3093 case REDUC_MIN_EXPR:
3094 case REDUC_PLUS_EXPR:
3095 case WIDEN_SUM_EXPR:
3096 case WIDEN_MULT_EXPR:
3097 case DOT_PROD_EXPR:
3099 case VEC_WIDEN_MULT_HI_EXPR:
3100 case VEC_WIDEN_MULT_LO_EXPR:
3101 case VEC_UNPACK_HI_EXPR:
3102 case VEC_UNPACK_LO_EXPR:
3103 case VEC_UNPACK_FLOAT_HI_EXPR:
3104 case VEC_UNPACK_FLOAT_LO_EXPR:
3105 case VEC_PACK_TRUNC_EXPR:
3106 case VEC_PACK_SAT_EXPR:
3107 case VEC_PACK_FIX_TRUNC_EXPR:
3108 case VEC_EXTRACT_EVEN_EXPR:
3109 case VEC_EXTRACT_ODD_EXPR:
3110 case VEC_INTERLEAVE_HIGH_EXPR:
3111 case VEC_INTERLEAVE_LOW_EXPR:
3113 return 1;
3115 /* A few special cases of expensive operations. This is useful
3116 to avoid inlining functions that contain too many of these. */
3117 case TRUNC_DIV_EXPR:
3118 case CEIL_DIV_EXPR:
3119 case FLOOR_DIV_EXPR:
3120 case ROUND_DIV_EXPR:
3121 case EXACT_DIV_EXPR:
3122 case TRUNC_MOD_EXPR:
3123 case CEIL_MOD_EXPR:
3124 case FLOOR_MOD_EXPR:
3125 case ROUND_MOD_EXPR:
3126 case RDIV_EXPR:
3127 if (TREE_CODE (op2) != INTEGER_CST)
3128 return weights->div_mod_cost;
3129 return 1;
3131 default:
3132 /* We expect a copy assignment with no operator. */
3133 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3134 return 0;
3139 /* Estimate number of instructions that will be created by expanding
3140 the statements in the statement sequence STMTS.
3141 WEIGHTS contains weights attributed to various constructs. */
3143 static int
3144 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3146 int cost;
3147 gimple_stmt_iterator gsi;
3149 cost = 0;
3150 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3151 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3153 return cost;
3157 /* Estimate number of instructions that will be created by expanding STMT.
3158 WEIGHTS contains weights attributed to various constructs. */
3160 int
3161 estimate_num_insns (gimple stmt, eni_weights *weights)
3163 unsigned cost, i;
3164 enum gimple_code code = gimple_code (stmt);
3165 tree lhs;
3166 tree rhs;
3168 switch (code)
3170 case GIMPLE_ASSIGN:
3171 /* Try to estimate the cost of assignments. We have two cases to
3172 deal with:
3173 1) Simple assignments to registers;
3174 2) Stores to things that must live in memory. This includes
3175 "normal" stores to scalars, but also assignments of large
3176 structures, or constructors of big arrays;
3178 Let us look at these two cases, assuming we have "a = b + C":
3179 <GIMPLE_ASSIGN <var_decl "a">
3180 <plus_expr <var_decl "b"> <constant C>>
3181 If "a" is a GIMPLE register, the assignment to it is free on almost
3182 any target, because "a" usually ends up in a real register. Hence
3183 the only cost of this expression comes from the PLUS_EXPR, and we
3184 can ignore the GIMPLE_ASSIGN.
3185 If "a" is not a GIMPLE register, the assignment to "a" will most
3186 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3187 of moving something into "a", which we compute using the function
3188 estimate_move_cost. */
3189 lhs = gimple_assign_lhs (stmt);
3190 rhs = gimple_assign_rhs1 (stmt);
3192 /* EH magic stuff is most probably going to be optimized out.
3193 We rarely really need to save EH info for unwinding
3194 nested exceptions. */
3195 if (TREE_CODE (lhs) == FILTER_EXPR
3196 || TREE_CODE (lhs) == EXC_PTR_EXPR
3197 || TREE_CODE (rhs) == FILTER_EXPR
3198 || TREE_CODE (rhs) == EXC_PTR_EXPR)
3199 return 0;
3200 if (is_gimple_reg (lhs))
3201 cost = 0;
3202 else
3203 cost = estimate_move_cost (TREE_TYPE (lhs));
3205 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3206 cost += estimate_move_cost (TREE_TYPE (rhs));
3208 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3209 gimple_assign_rhs1 (stmt),
3210 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3211 == GIMPLE_BINARY_RHS
3212 ? gimple_assign_rhs2 (stmt) : NULL);
3213 break;
3215 case GIMPLE_COND:
3216 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3217 gimple_op (stmt, 0),
3218 gimple_op (stmt, 1));
3219 break;
3221 case GIMPLE_SWITCH:
3222 /* Take into account cost of the switch + guess 2 conditional jumps for
3223 each case label.
3225 TODO: once the switch expansion logic is sufficiently separated, we can
3226 do a better job of estimating the cost of the switch. */
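/* E.g. (hypothetical): a switch with 16 labels is costed as
   floor_log2 (16) * 2 == 8 when estimating time (roughly a balanced
   decision tree), but as 16 * 2 == 32 when estimating size. */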
3227 if (weights->time_based)
3228 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3229 else
3230 cost = gimple_switch_num_labels (stmt) * 2;
3231 break;
3233 case GIMPLE_CALL:
3235 tree decl = gimple_call_fndecl (stmt);
3236 tree addr = gimple_call_fn (stmt);
3237 tree funtype = TREE_TYPE (addr);
3239 if (POINTER_TYPE_P (funtype))
3240 funtype = TREE_TYPE (funtype);
3242 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3243 cost = weights->target_builtin_call_cost;
3244 else
3245 cost = weights->call_cost;
3247 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3248 switch (DECL_FUNCTION_CODE (decl))
3250 case BUILT_IN_CONSTANT_P:
3251 return 0;
3252 case BUILT_IN_EXPECT:
3253 return 0;
3255 /* Prefetch instruction is not expensive. */
3256 case BUILT_IN_PREFETCH:
3257 cost = weights->target_builtin_call_cost;
3258 break;
3260 default:
3261 break;
3264 if (decl)
3265 funtype = TREE_TYPE (decl);
3267 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3268 cost += estimate_move_cost (TREE_TYPE (funtype));
3269 /* Our cost must be kept in sync with
3270 cgraph_estimate_size_after_inlining, which uses the function
3271 declaration to figure out the arguments. */
3272 if (decl && DECL_ARGUMENTS (decl))
3274 tree arg;
3275 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3276 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3277 cost += estimate_move_cost (TREE_TYPE (arg));
3279 else if (funtype && prototype_p (funtype))
3281 tree t;
3282 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3283 t = TREE_CHAIN (t))
3284 if (!VOID_TYPE_P (TREE_VALUE (t)))
3285 cost += estimate_move_cost (TREE_VALUE (t));
3287 else
3289 for (i = 0; i < gimple_call_num_args (stmt); i++)
3291 tree arg = gimple_call_arg (stmt, i);
3292 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3293 cost += estimate_move_cost (TREE_TYPE (arg));
3297 break;
3300 case GIMPLE_GOTO:
3301 case GIMPLE_LABEL:
3302 case GIMPLE_NOP:
3303 case GIMPLE_PHI:
3304 case GIMPLE_RETURN:
3305 case GIMPLE_PREDICT:
3306 case GIMPLE_DEBUG:
3307 return 0;
3309 case GIMPLE_ASM:
3310 case GIMPLE_RESX:
3311 return 1;
3313 case GIMPLE_BIND:
3314 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3316 case GIMPLE_EH_FILTER:
3317 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3319 case GIMPLE_CATCH:
3320 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3322 case GIMPLE_TRY:
3323 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3324 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3326 /* OpenMP directives are generally very expensive. */
3328 case GIMPLE_OMP_RETURN:
3329 case GIMPLE_OMP_SECTIONS_SWITCH:
3330 case GIMPLE_OMP_ATOMIC_STORE:
3331 case GIMPLE_OMP_CONTINUE:
3332 /* ...except these, which are cheap. */
3333 return 0;
3335 case GIMPLE_OMP_ATOMIC_LOAD:
3336 return weights->omp_cost;
3338 case GIMPLE_OMP_FOR:
3339 return (weights->omp_cost
3340 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3341 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3343 case GIMPLE_OMP_PARALLEL:
3344 case GIMPLE_OMP_TASK:
3345 case GIMPLE_OMP_CRITICAL:
3346 case GIMPLE_OMP_MASTER:
3347 case GIMPLE_OMP_ORDERED:
3348 case GIMPLE_OMP_SECTION:
3349 case GIMPLE_OMP_SECTIONS:
3350 case GIMPLE_OMP_SINGLE:
3351 return (weights->omp_cost
3352 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3354 default:
3355 gcc_unreachable ();
3358 return cost;
3361 /* Estimate number of instructions that will be created by expanding
3362 function FNDECL. WEIGHTS contains weights attributed to various
3363 constructs. */
3365 int
3366 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3368 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3369 gimple_stmt_iterator bsi;
3370 basic_block bb;
3371 int n = 0;
3373 gcc_assert (my_function && my_function->cfg);
3374 FOR_EACH_BB_FN (bb, my_function)
3376 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3377 n += estimate_num_insns (gsi_stmt (bsi), weights);
3380 return n;
3384 /* Initializes weights used by estimate_num_insns. */
3386 void
3387 init_inline_once (void)
3389 eni_size_weights.call_cost = 1;
3390 eni_size_weights.target_builtin_call_cost = 1;
3391 eni_size_weights.div_mod_cost = 1;
3392 eni_size_weights.omp_cost = 40;
3393 eni_size_weights.time_based = false;
3395 /* Estimating time for call is difficult, since we have no idea what the
3396 called function does. In the current uses of eni_time_weights,
3397 underestimating the cost does less harm than overestimating it, so
3398 we choose a rather small value here. */
3399 eni_time_weights.call_cost = 10;
3400 eni_time_weights.target_builtin_call_cost = 10;
3401 eni_time_weights.div_mod_cost = 10;
3402 eni_time_weights.omp_cost = 40;
3403 eni_time_weights.time_based = true;
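/* E.g. (hypothetical): a division by a non-constant divisor is
   charged div_mod_cost == 10 under eni_time_weights, but only 1
   under eni_size_weights. */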
3406 /* Estimate the number of instructions in a gimple_seq. */
3408 int
3409 count_insns_seq (gimple_seq seq, eni_weights *weights)
3411 gimple_stmt_iterator gsi;
3412 int n = 0;
3413 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3414 n += estimate_num_insns (gsi_stmt (gsi), weights);
3416 return n;
3420 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3422 static void
3423 prepend_lexical_block (tree current_block, tree new_block)
3425 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3426 BLOCK_SUBBLOCKS (current_block) = new_block;
3427 BLOCK_SUPERCONTEXT (new_block) = current_block;
3430 /* Fetch callee declaration from the call graph edge going from NODE and
3431 associated with the call statement STMT. Return NULL_TREE if not found. */
3432 static tree
3433 get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3435 struct cgraph_edge *cs;
3437 cs = cgraph_edge (node, stmt);
3438 if (cs)
3439 return cs->callee->decl;
3441 return NULL_TREE;
3444 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3446 static bool
3447 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3449 tree retvar, use_retvar;
3450 tree fn;
3451 struct pointer_map_t *st, *dst;
3452 tree return_slot;
3453 tree modify_dest;
3454 location_t saved_location;
3455 struct cgraph_edge *cg_edge;
3456 cgraph_inline_failed_t reason;
3457 basic_block return_block;
3458 edge e;
3459 gimple_stmt_iterator gsi, stmt_gsi;
3460 bool successfully_inlined = FALSE;
3461 bool purge_dead_abnormal_edges;
3462 tree t_step;
3463 tree var;
3465 /* Set input_location here so we get the right instantiation context
3466 if we call instantiate_decl from inlinable_function_p. */
3467 saved_location = input_location;
3468 if (gimple_has_location (stmt))
3469 input_location = gimple_location (stmt);
3471 /* From here on, we're only interested in CALL_EXPRs. */
3472 if (gimple_code (stmt) != GIMPLE_CALL)
3473 goto egress;
3475 /* First, see if we can figure out what function is being called.
3476 If we cannot, then there is no hope of inlining the function. */
3477 fn = gimple_call_fndecl (stmt);
3478 if (!fn)
3480 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3481 if (!fn)
3482 goto egress;
3485 /* Turn forward declarations into real ones. */
3486 fn = cgraph_node (fn)->decl;
3488 /* If FN is a declaration of a function in a nested scope that was
3489 globally declared inline, we don't set its DECL_INITIAL.
3490 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3491 C++ front-end uses it for cdtors to refer to their internal
3492 declarations, that are not real functions. Fortunately those
3493 don't have trees to be saved, so we can tell by checking their
3494 gimple_body. */
3495 if (!DECL_INITIAL (fn)
3496 && DECL_ABSTRACT_ORIGIN (fn)
3497 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3498 fn = DECL_ABSTRACT_ORIGIN (fn);
3500 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
3501 Kill this check once this is fixed. */
3502 if (!id->dst_node->analyzed)
3503 goto egress;
3505 cg_edge = cgraph_edge (id->dst_node, stmt);
3507 /* Don't try to inline functions that are not well-suited to
3508 inlining. */
3509 if (!cgraph_inline_p (cg_edge, &reason))
3511 /* If this call was originally indirect, we do not want to emit any
3512 inlining related warnings or sorry messages because there are no
3513 guarantees regarding those. */
3514 if (cg_edge->indirect_call)
3515 goto egress;
3517 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3518 /* Avoid warnings during early inline pass. */
3519 && cgraph_global_info_ready)
3521 sorry ("inlining failed in call to %q+F: %s", fn,
3522 cgraph_inline_failed_string (reason));
3523 sorry ("called from here");
3525 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3526 && !DECL_IN_SYSTEM_HEADER (fn)
3527 && reason != CIF_UNSPECIFIED
3528 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3529 /* Avoid warnings during early inline pass. */
3530 && cgraph_global_info_ready)
3532 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3533 fn, cgraph_inline_failed_string (reason));
3534 warning (OPT_Winline, "called from here");
3536 goto egress;
3538 fn = cg_edge->callee->decl;
3540 #ifdef ENABLE_CHECKING
3541 if (cg_edge->callee->decl != id->dst_node->decl)
3542 verify_cgraph_node (cg_edge->callee);
3543 #endif
3545 /* We will be inlining this callee. */
3546 id->eh_region = lookup_stmt_eh_region (stmt);
3548 /* Split the block holding the GIMPLE_CALL. */
3549 e = split_block (bb, stmt);
3550 bb = e->src;
3551 return_block = e->dest;
3552 remove_edge (e);
3554 /* split_block splits after the statement; work around this by
3555 moving the call into the second block manually. Not pretty,
3556 but seems easier than doing the CFG manipulation by hand
3557 when the GIMPLE_CALL is in the last statement of BB. */
3558 stmt_gsi = gsi_last_bb (bb);
3559 gsi_remove (&stmt_gsi, false);
3561 /* If the GIMPLE_CALL was in the last statement of BB, it may have
3562 been the source of abnormal edges. In this case, schedule
3563 the removal of dead abnormal edges. */
3564 gsi = gsi_start_bb (return_block);
3565 if (gsi_end_p (gsi))
3567 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3568 purge_dead_abnormal_edges = true;
3570 else
3572 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3573 purge_dead_abnormal_edges = false;
3576 stmt_gsi = gsi_start_bb (return_block);
3578 /* Build a block containing code to initialize the arguments, the
3579 actual inline expansion of the body, and a label for the return
3580 statements within the function to jump to. The type of the
3581 statement expression is the return type of the function call. */
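/* In outline (a hypothetical example), inlining "a = foo (5)" thus
   proceeds as: split the block before the call, map foo's parameter
   to 5, copy foo's CFG between the two halves, turn foo's return
   statements into assignments to the return variable, and finally
   rewrite the call statement itself into "a = USE_RETVAR". */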
3582 id->block = make_node (BLOCK);
3583 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3584 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3585 prepend_lexical_block (gimple_block (stmt), id->block);
3587 /* Local declarations will be replaced by their equivalents in this
3588 map. */
3589 st = id->decl_map;
3590 id->decl_map = pointer_map_create ();
3591 dst = id->debug_map;
3592 id->debug_map = NULL;
3594 /* Record the function we are about to inline. */
3595 id->src_fn = fn;
3596 id->src_node = cg_edge->callee;
3597 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3598 id->gimple_call = stmt;
3600 gcc_assert (!id->src_cfun->after_inlining);
3602 id->entry_bb = bb;
3603 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3605 gimple_stmt_iterator si = gsi_last_bb (bb);
3606 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3607 NOT_TAKEN),
3608 GSI_NEW_STMT);
3610 initialize_inlined_parameters (id, stmt, fn, bb);
3612 if (DECL_INITIAL (fn))
3613 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3615 /* Return statements in the function body will be replaced by jumps
3616 to the RET_LABEL. */
3617 gcc_assert (DECL_INITIAL (fn));
3618 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3620 /* Find the LHS to which the result of this call is assigned. */
3621 return_slot = NULL;
3622 if (gimple_call_lhs (stmt))
3624 modify_dest = gimple_call_lhs (stmt);
3626 /* The function which we are inlining might not return a value,
3627 in which case we should issue a warning that the function
3628 does not return a value. In that case the optimizers will
3629 see that the variable to which the value is assigned was not
3630 initialized. We do not want to issue a warning about that
3631 uninitialized variable. */
3632 if (DECL_P (modify_dest))
3633 TREE_NO_WARNING (modify_dest) = 1;
3635 if (gimple_call_return_slot_opt_p (stmt))
3637 return_slot = modify_dest;
3638 modify_dest = NULL;
3641 else
3642 modify_dest = NULL;
3644 /* If we are inlining a call to the C++ operator new, we don't want
3645 to use type based alias analysis on the return value. Otherwise
3646 we may get confused if the compiler sees that the inlined new
3647 function returns a pointer which was just deleted. See bug
3648 33407. */
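/* A hypothetical sketch of the hazard: if the caller deletes an
   object and assigns the result of an inlined operator new to the
   same pointer, type-based rules applied to the returned value could
   let the optimizers assume the new pointer is unrelated to the
   just-deleted storage (see PR 33407), so neither the return slot
   nor the destination is reused here. */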
3649 if (DECL_IS_OPERATOR_NEW (fn))
3651 return_slot = NULL;
3652 modify_dest = NULL;
3655 /* Declare the return variable for the function. */
3656 retvar = declare_return_variable (id, return_slot, modify_dest, &use_retvar);
3658 /* Add local vars in this inlined callee to caller. */
3659 t_step = id->src_cfun->local_decls;
3660 for (; t_step; t_step = TREE_CHAIN (t_step))
3662 var = TREE_VALUE (t_step);
3663 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3665 if (var_ann (var) && add_referenced_var (var))
3666 cfun->local_decls = tree_cons (NULL_TREE, var,
3667 cfun->local_decls);
3669 else if (!can_be_nonlocal (var, id))
3670 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3671 cfun->local_decls);
3674 /* This is it. Duplicate the callee body. Assume callee is
3675 pre-gimplified. Note that we must not alter the caller
3676 function in any way before this point, as this CALL_EXPR may be
3677 a self-referential call; if we're calling ourselves, we need to
3678 duplicate our body before altering anything. */
3679 copy_body (id, bb->count, bb->frequency, bb, return_block);
3681 /* Reset the escaped and callused solutions. */
3682 if (cfun->gimple_df)
3684 pt_solution_reset (&cfun->gimple_df->escaped);
3685 pt_solution_reset (&cfun->gimple_df->callused);
3688 /* Clean up. */
3689 if (id->debug_map)
3691 pointer_map_destroy (id->debug_map);
3692 id->debug_map = dst;
3694 pointer_map_destroy (id->decl_map);
3695 id->decl_map = st;
3697 /* Unlink the call's virtual operands before replacing it. */
3698 unlink_stmt_vdef (stmt);
3700 /* If the inlined function returns a result that we care about,
3701 substitute the GIMPLE_CALL with an assignment of the return
3702 variable to the LHS of the call. That is, if STMT was
3703 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3704 if (use_retvar && gimple_call_lhs (stmt))
3706 gimple old_stmt = stmt;
3707 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3708 gsi_replace (&stmt_gsi, stmt, false);
3709 if (gimple_in_ssa_p (cfun))
3710 mark_symbols_for_renaming (stmt);
3711 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3713 else
3715 /* Handle the case of inlining a function with no return
3716 statement, which causes the return value to become undefined. */
3717 if (gimple_call_lhs (stmt)
3718 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3720 tree name = gimple_call_lhs (stmt);
3721 tree var = SSA_NAME_VAR (name);
3722 tree def = gimple_default_def (cfun, var);
3724 if (def)
3726 /* If the variable already has a default definition (i.e. it is
3727 used uninitialized), make this name undefined via a move from it. */
3728 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3729 gsi_replace (&stmt_gsi, stmt, true);
3731 else
3733 /* Otherwise make this variable undefined. */
3734 gsi_remove (&stmt_gsi, true);
3735 set_default_def (var, name);
3736 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3739 else
3740 gsi_remove (&stmt_gsi, true);
3743 if (purge_dead_abnormal_edges)
3744 gimple_purge_dead_abnormal_call_edges (return_block);
3746 /* If the value of the new expression is ignored, that's OK. We
3747 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3748 the equivalent inlined version either. */
3749 if (is_gimple_assign (stmt))
3751 gcc_assert (gimple_assign_single_p (stmt)
3752 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
3753 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3756 /* Output the inlining info for this abstract function, since it has been
3757 inlined. If we don't do this now, we can lose the information about the
3758 variables in the function when the blocks get blown away as soon as we
3759 remove the cgraph node. */
3760 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
3762 /* Update callgraph if needed. */
3763 cgraph_remove_node (cg_edge->callee);
3765 id->block = NULL_TREE;
3766 successfully_inlined = TRUE;
3768 egress:
3769 input_location = saved_location;
3770 return successfully_inlined;
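/* For illustration, a sketch of the net effect on hypothetical user
   code (not taken from GCC sources). Given

     static int callee (int y) { return y * 2; }
     int caller (int x) { return callee (x) + 1; }

   expand_call_inline conceptually rewrites the call site into

     int caller (int x)
     {
       int y_1 = x;             <- initialize_inlined_parameters
       int retval_2 = y_1 * 2;  <- copied body; the return became an
                                   assignment to the return variable
       return retval_2 + 1;     <- the call replaced by USE_RETVAR
     }

   where y_1 and retval_2 stand for the fresh caller-local variables
   introduced through the decl map. */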
3773 /* Expand call statements reachable from STMT_P.
3774 We can only have CALL_EXPRs as the "toplevel" tree code or nested
3775 in a MODIFY_EXPR. See tree-gimple.c:get_call_expr_in(). Unfortunately
3776 we cannot use that function here because we need a pointer
3777 to the CALL_EXPR, not the tree itself. */
3779 static bool
3780 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
3782 gimple_stmt_iterator gsi;
3784 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3786 gimple stmt = gsi_stmt (gsi);
3788 if (is_gimple_call (stmt)
3789 && expand_call_inline (bb, stmt, id))
3790 return true;
3793 return false;
3797 /* Walk all basic blocks created after FIRST and try to fold every statement
3798 in the STATEMENTS pointer set. */
3800 static void
3801 fold_marked_statements (int first, struct pointer_set_t *statements)
3803 for (; first < n_basic_blocks; first++)
3804 if (BASIC_BLOCK (first))
3806 gimple_stmt_iterator gsi;
3808 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
3809 !gsi_end_p (gsi);
3810 gsi_next (&gsi))
3811 if (pointer_set_contains (statements, gsi_stmt (gsi)))
3813 gimple old_stmt = gsi_stmt (gsi);
3814 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
3816 if (fold_stmt (&gsi))
3818 /* Re-read the statement from GSI as fold_stmt() may
3819 have changed it. */
3820 gimple new_stmt = gsi_stmt (gsi);
3821 update_stmt (new_stmt);
3823 if (is_gimple_call (old_stmt)
3824 || is_gimple_call (new_stmt))
3825 cgraph_update_edges_for_call_stmt (old_stmt, old_decl, new_stmt);
3827 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
3828 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
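/* A sketch of why marked statements become foldable at all: after
   inlining hypothetical user code such as

     static int is_positive (int v) { return v > 0; }
     ... if (is_positive (1)) ...

   the copied comparison reads '1 > 0', and fold_stmt reduces it to a
   constant, letting the later CFG cleanup remove the dead branch. */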
3834 /* Return true if BB has at least one abnormal outgoing edge. */
3836 static inline bool
3837 has_abnormal_outgoing_edge_p (basic_block bb)
3839 edge e;
3840 edge_iterator ei;
3842 FOR_EACH_EDGE (e, ei, bb->succs)
3843 if (e->flags & EDGE_ABNORMAL)
3844 return true;
3846 return false;
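/* Abnormal edges model transfers that are not ordinary jumps, such as
   setjmp/longjmp and nonlocal gotos. Hypothetical user code that gives
   rise to them:

     #include <setjmp.h>
     static jmp_buf env;
     int checkpoint (void) { return setjmp (env); }

   Blocks involved in such transfers carry EDGE_ABNORMAL successors,
   which is what this predicate detects. */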
3849 /* Expand calls to inline functions in the body of FN. */
3851 unsigned int
3852 optimize_inline_calls (tree fn)
3854 copy_body_data id;
3855 tree prev_fn;
3856 basic_block bb;
3857 int last = n_basic_blocks;
3858 struct gimplify_ctx gctx;
3860 /* There is no point in performing inlining if errors have already
3861 occurred -- and we might crash if we try to inline invalid
3862 code. */
3863 if (errorcount || sorrycount)
3864 return 0;
3866 /* Clear out ID. */
3867 memset (&id, 0, sizeof (id));
3869 id.src_node = id.dst_node = cgraph_node (fn);
3870 id.dst_fn = fn;
3871 /* ... or, if FN sits inside a function still being compiled, that
enclosing function. */
3872 prev_fn = NULL_TREE;
3873 if (current_function_decl)
3875 id.dst_fn = current_function_decl;
3876 prev_fn = current_function_decl;
3879 id.copy_decl = copy_decl_maybe_to_var;
3880 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3881 id.transform_new_cfg = false;
3882 id.transform_return_to_modify = true;
3883 id.transform_lang_insert_block = NULL;
3884 id.statements_to_fold = pointer_set_create ();
3886 push_gimplify_context (&gctx);
3888 /* We make no attempts to keep dominance info up-to-date. */
3889 free_dominance_info (CDI_DOMINATORS);
3890 free_dominance_info (CDI_POST_DOMINATORS);
3892 /* Register specific gimple functions. */
3893 gimple_register_cfg_hooks ();
3895 /* Reach the trees by walking over the CFG, and note the
3896 enclosing basic-blocks in the call edges. */
3897 /* We walk the blocks going forward, because inlined function bodies
3898 will split id->current_basic_block, and the new blocks will
3899 follow it; we'll trudge through them, processing their CALL_EXPRs
3900 along the way. */
3901 FOR_EACH_BB (bb)
3902 gimple_expand_calls_inline (bb, &id);
3904 pop_gimplify_context (NULL);
3906 #ifdef ENABLE_CHECKING
3908 struct cgraph_edge *e;
3910 verify_cgraph_node (id.dst_node);
3912 /* Double check that we inlined everything we are supposed to inline. */
3913 for (e = id.dst_node->callees; e; e = e->next_callee)
3914 gcc_assert (e->inline_failed);
3916 #endif
3918 /* Fold the statements before compacting/renumbering the basic blocks. */
3919 fold_marked_statements (last, id.statements_to_fold);
3920 pointer_set_destroy (id.statements_to_fold);
3922 gcc_assert (!id.debug_stmts);
3924 /* Renumber the (code) basic_blocks consecutively. */
3925 compact_blocks ();
3926 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3927 number_blocks (fn);
3929 fold_cond_expr_cond ();
3930 delete_unreachable_blocks_update_callgraph (&id);
3931 #ifdef ENABLE_CHECKING
3932 verify_cgraph_node (id.dst_node);
3933 #endif
3935 /* It would be nice to check SSA/CFG/statement consistency here, but it is
3936 not possible yet - the IPA passes might mark various functions as not
3937 throwing, and they do not proactively update local EH info. This is
3938 done later in the fixup_cfg pass, which also executes the verification. */
3939 return (TODO_update_ssa
3940 | TODO_cleanup_cfg
3941 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
3942 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
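/* A sketch of how the forward walk above handles nested inlining
   (hypothetical user code):

     static int h (int x) { return x + 1; }
     static int g (int x) { return h (x) * 2; }
     int f (int x) { return g (x); }

   Inlining g into f splits f's block and appends copies of g's blocks,
   which still contain the call to h; FOR_EACH_BB reaches those new
   blocks later in the walk, so h ends up inlined into f as well. */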
3945 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
3947 tree
3948 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3950 enum tree_code code = TREE_CODE (*tp);
3951 enum tree_code_class cl = TREE_CODE_CLASS (code);
3953 /* We make copies of most nodes. */
3954 if (IS_EXPR_CODE_CLASS (cl)
3955 || code == TREE_LIST
3956 || code == TREE_VEC
3957 || code == TYPE_DECL
3958 || code == OMP_CLAUSE)
3960 /* Because the chain gets clobbered when we make a copy, we save it
3961 here. */
3962 tree chain = NULL_TREE, new_tree;
3964 chain = TREE_CHAIN (*tp);
3966 /* Copy the node. */
3967 new_tree = copy_node (*tp);
3969 /* Propagate mudflap marked-ness. */
3970 if (flag_mudflap && mf_marked_p (*tp))
3971 mf_mark (new_tree);
3973 *tp = new_tree;
3975 /* Now, restore the chain, if appropriate. That will cause
3976 walk_tree to walk into the chain as well. */
3977 if (code == PARM_DECL
3978 || code == TREE_LIST
3979 || code == OMP_CLAUSE)
3980 TREE_CHAIN (*tp) = chain;
3982 /* For now, we don't update BLOCKs when we make copies. So, we
3983 have to nullify all BIND_EXPRs. */
3984 if (TREE_CODE (*tp) == BIND_EXPR)
3985 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
3987 else if (code == CONSTRUCTOR)
3989 /* CONSTRUCTOR nodes need special handling because
3990 we need to duplicate the vector of elements. */
3991 tree new_tree;
3993 new_tree = copy_node (*tp);
3995 /* Propagate mudflap marked-ness. */
3996 if (flag_mudflap && mf_marked_p (*tp))
3997 mf_mark (new_tree);
3999 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4000 CONSTRUCTOR_ELTS (*tp));
4001 *tp = new_tree;
4003 else if (TREE_CODE_CLASS (code) == tcc_type)
4004 *walk_subtrees = 0;
4005 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4006 *walk_subtrees = 0;
4007 else if (TREE_CODE_CLASS (code) == tcc_constant)
4008 *walk_subtrees = 0;
4009 else
4010 gcc_assert (code != STATEMENT_LIST);
4011 return NULL_TREE;
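/* Typical use, sketched: unsharing an expression by walking it with
   copy_tree_r, e.g.

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);

   Expression nodes are replaced by fresh copies in place as the walk
   descends; declarations, types and constants are shared rather than
   duplicated, exactly as the dispatch above prescribes. */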
4014 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4015 information indicating to what new SAVE_EXPR this one should be mapped,
4016 use that one. Otherwise, create a new node and enter it in ST. */
4019 static void
4020 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4022 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4023 tree *n;
4024 tree t;
4026 /* See if we already encountered this SAVE_EXPR. */
4027 n = (tree *) pointer_map_contains (st, *tp);
4029 /* If we didn't already remap this SAVE_EXPR, do so now. */
4030 if (!n)
4032 t = copy_node (*tp);
4034 /* Remember this SAVE_EXPR. */
4035 *pointer_map_insert (st, *tp) = t;
4036 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4037 *pointer_map_insert (st, t) = t;
4039 else
4041 /* We've already walked into this SAVE_EXPR; don't do it again. */
4042 *walk_subtrees = 0;
4043 t = *n;
4046 /* Replace this SAVE_EXPR with the copy. */
4047 *tp = t;
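/* SAVE_EXPRs exist to force single evaluation of an expression used in
   several places; e.g. for a variable-length array in hypothetical
   user code

     void f (int n) { int a[n + 1]; }

   the bound 'n + 1' is wrapped in one SAVE_EXPR reused wherever the
   size is needed. The second pointer_map_insert above maps the copy
   to itself, so reaching the copy again does not remap it anew. */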
4050 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
4051 copies the declaration and enters it in the decl map in DATA (which is
4052 really a `copy_body_data *'). */
4054 static tree
4055 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4056 void *data)
4058 copy_body_data *id = (copy_body_data *) data;
4060 /* Don't walk into types. */
4061 if (TYPE_P (*tp))
4062 *walk_subtrees = 0;
4064 else if (TREE_CODE (*tp) == LABEL_EXPR)
4066 tree decl = TREE_OPERAND (*tp, 0);
4068 /* Copy the decl and remember the copy. */
4069 insert_decl_map (id, decl, id->copy_decl (decl, id));
4072 return NULL_TREE;
4075 /* Perform any modifications to EXPR required when it is unsaved. Does
4076 not recurse into EXPR's subtrees. */
4078 static void
4079 unsave_expr_1 (tree expr)
4081 switch (TREE_CODE (expr))
4083 case TARGET_EXPR:
4084 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4085 It's OK for this to happen if it was part of a subtree that
4086 isn't immediately expanded, such as operand 2 of another
4087 TARGET_EXPR. */
4088 if (TREE_OPERAND (expr, 1))
4089 break;
4091 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4092 TREE_OPERAND (expr, 3) = NULL_TREE;
4093 break;
4095 default:
4096 break;
4100 /* Called via walk_tree when an expression is unsaved. Using the
4101 decl map in DATA (which is really a `copy_body_data *'),
4102 remaps all local declarations to appropriate replacements. */
4104 static tree
4105 unsave_r (tree *tp, int *walk_subtrees, void *data)
4107 copy_body_data *id = (copy_body_data *) data;
4108 struct pointer_map_t *st = id->decl_map;
4109 tree *n;
4111 /* Only a local declaration (variable or label). */
4112 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4113 || TREE_CODE (*tp) == LABEL_DECL)
4115 /* Lookup the declaration. */
4116 n = (tree *) pointer_map_contains (st, *tp);
4118 /* If it's there, remap it. */
4119 if (n)
4120 *tp = *n;
4123 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4124 gcc_unreachable ();
4125 else if (TREE_CODE (*tp) == BIND_EXPR)
4126 copy_bind_expr (tp, walk_subtrees, id);
4127 else if (TREE_CODE (*tp) == SAVE_EXPR
4128 || TREE_CODE (*tp) == TARGET_EXPR)
4129 remap_save_expr (tp, st, walk_subtrees);
4130 else
4132 copy_tree_r (tp, walk_subtrees, NULL);
4134 /* Do whatever unsaving is required. */
4135 unsave_expr_1 (*tp);
4138 /* Keep iterating. */
4139 return NULL_TREE;
4142 /* Copies everything in EXPR and replaces variables, labels
4143 and SAVE_EXPRs local to EXPR. */
4145 tree
4146 unsave_expr_now (tree expr)
4148 copy_body_data id;
4150 /* There's nothing to do for NULL_TREE. */
4151 if (expr == 0)
4152 return expr;
4154 /* Set up ID. */
4155 memset (&id, 0, sizeof (id));
4156 id.src_fn = current_function_decl;
4157 id.dst_fn = current_function_decl;
4158 id.decl_map = pointer_map_create ();
4159 id.debug_map = NULL;
4161 id.copy_decl = copy_decl_no_change;
4162 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4163 id.transform_new_cfg = false;
4164 id.transform_return_to_modify = false;
4165 id.transform_lang_insert_block = NULL;
4167 /* Walk the tree once to find local labels. */
4168 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4170 /* Walk the tree again, copying, remapping, and unsaving. */
4171 walk_tree (&expr, unsave_r, &id, NULL);
4173 /* Clean up. */
4174 pointer_map_destroy (id.decl_map);
4175 if (id.debug_map)
4176 pointer_map_destroy (id.debug_map);
4178 return expr;
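/* A usage sketch (the variable names are placeholders): front ends use
   this to obtain a privately re-evaluatable copy of an expression whose
   SAVE_EXPRs would otherwise stay shared with the original, e.g.

     tree size_copy = unsave_expr_now (size_expr);

   SIZE_COPY can then be gimplified or rewritten without disturbing
   SIZE_EXPR. */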
4181 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4182 label, copies the declaration and enters it in the decl map in DATA (which
4183 is really a `copy_body_data *'). */
4185 static tree
4186 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4187 bool *handled_ops_p ATTRIBUTE_UNUSED,
4188 struct walk_stmt_info *wi)
4190 copy_body_data *id = (copy_body_data *) wi->info;
4191 gimple stmt = gsi_stmt (*gsip);
4193 if (gimple_code (stmt) == GIMPLE_LABEL)
4195 tree decl = gimple_label_label (stmt);
4197 /* Copy the decl and remember the copy. */
4198 insert_decl_map (id, decl, id->copy_decl (decl, id));
4201 return NULL_TREE;
4205 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4206 Using the decl map in the `copy_body_data' passed via WI->info,
4207 remaps all local declarations to appropriate replacements in gimple
4208 operands. */
4210 static tree
4211 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4213 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4214 copy_body_data *id = (copy_body_data *) wi->info;
4215 struct pointer_map_t *st = id->decl_map;
4216 tree *n;
4217 tree expr = *tp;
4219 /* Only a local declaration (variable or label). */
4220 if ((TREE_CODE (expr) == VAR_DECL
4221 && !TREE_STATIC (expr))
4222 || TREE_CODE (expr) == LABEL_DECL)
4224 /* Lookup the declaration. */
4225 n = (tree *) pointer_map_contains (st, expr);
4227 /* If it's there, remap it. */
4228 if (n)
4229 *tp = *n;
4230 *walk_subtrees = 0;
4232 else if (TREE_CODE (expr) == STATEMENT_LIST
4233 || TREE_CODE (expr) == BIND_EXPR
4234 || TREE_CODE (expr) == SAVE_EXPR)
4235 gcc_unreachable ();
4236 else if (TREE_CODE (expr) == TARGET_EXPR)
4238 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4239 It's OK for this to happen if it was part of a subtree that
4240 isn't immediately expanded, such as operand 2 of another
4241 TARGET_EXPR. */
4242 if (!TREE_OPERAND (expr, 1))
4244 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4245 TREE_OPERAND (expr, 3) = NULL_TREE;
4249 /* Keep iterating. */
4250 return NULL_TREE;
4254 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4255 Using the decl map in the `copy_body_data' passed via WI->info,
4256 remaps all local declarations to appropriate replacements in gimple
4257 statements. */
4259 static tree
4260 replace_locals_stmt (gimple_stmt_iterator *gsip,
4261 bool *handled_ops_p ATTRIBUTE_UNUSED,
4262 struct walk_stmt_info *wi)
4264 copy_body_data *id = (copy_body_data *) wi->info;
4265 gimple stmt = gsi_stmt (*gsip);
4267 if (gimple_code (stmt) == GIMPLE_BIND)
4269 tree block = gimple_bind_block (stmt);
4271 if (block)
4273 remap_block (&block, id);
4274 gimple_bind_set_block (stmt, block);
4277 /* This will remap a lot of the same decls again, but this should be
4278 harmless. */
4279 if (gimple_bind_vars (stmt))
4280 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4283 /* Keep iterating. */
4284 return NULL_TREE;
4288 /* Copies everything in SEQ and replaces variables and labels local to
4289 current_function_decl. */
4291 gimple_seq
4292 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4294 copy_body_data id;
4295 struct walk_stmt_info wi;
4296 struct pointer_set_t *visited;
4297 gimple_seq copy;
4299 /* There's nothing to do for an empty sequence. */
4300 if (seq == NULL)
4301 return seq;
4303 /* Set up ID. */
4304 memset (&id, 0, sizeof (id));
4305 id.src_fn = current_function_decl;
4306 id.dst_fn = current_function_decl;
4307 id.decl_map = pointer_map_create ();
4308 id.debug_map = NULL;
4310 id.copy_decl = copy_decl_no_change;
4311 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4312 id.transform_new_cfg = false;
4313 id.transform_return_to_modify = false;
4314 id.transform_lang_insert_block = NULL;
4316 /* Walk the tree once to find local labels. */
4317 memset (&wi, 0, sizeof (wi));
4318 visited = pointer_set_create ();
4319 wi.info = &id;
4320 wi.pset = visited;
4321 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4322 pointer_set_destroy (visited);
4324 copy = gimple_seq_copy (seq);
4326 /* Walk the copy, remapping decls. */
4327 memset (&wi, 0, sizeof (wi));
4328 wi.info = &id;
4329 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4331 /* Clean up. */
4332 pointer_map_destroy (id.decl_map);
4333 if (id.debug_map)
4334 pointer_map_destroy (id.debug_map);
4336 return copy;
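/* A usage sketch: when a lowering pass needs to emit a statement
   sequence twice inside the same function,

     gimple_seq second = copy_gimple_seq_and_replace_locals (seq);

   yields a copy in which labels and non-static locals are fresh
   declarations, so both sequences can coexist without conflicting
   definitions. */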
4340 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4342 static tree
4343 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4345 if (*tp == data)
4346 return (tree) data;
4347 else
4348 return NULL;
4351 bool
4352 debug_find_tree (tree top, tree search)
4354 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
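/* Intended for interactive use from the debugger, e.g.

     (gdb) p debug_find_tree (some_block, some_decl)

   where both arguments are whatever trees are at hand; it returns true
   iff SEARCH occurs somewhere beneath TOP. */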
4358 /* Declare the variables created by the inliner. Add all the variables in
4359 VARS to BLOCK. */
4361 static void
4362 declare_inline_vars (tree block, tree vars)
4364 tree t;
4365 for (t = vars; t; t = TREE_CHAIN (t))
4367 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4368 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4369 cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
4372 if (block)
4373 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4376 /* Finish up the fields of COPY, a duplicate of DECL. DECL originally
4377 lived in ID->src_fn; COPY will normally be placed in ID->dst_fn (see
4378 the DECL_CONTEXT handling below for the exceptions). */
4380 static tree
4381 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4383 /* Don't generate debug information for the copy if we wouldn't have
4384 generated it for the original either. */
4385 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4386 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4388 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4389 declaration inspired this copy. */
4390 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4392 /* The new variable/label has no RTL, yet. */
4393 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4394 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4395 SET_DECL_RTL (copy, NULL_RTX);
4397 /* These args would always appear unused, if not for this. */
4398 TREE_USED (copy) = 1;
4400 /* Set the context for the new declaration. */
4401 if (!DECL_CONTEXT (decl))
4402 /* Globals stay global. */
4404 else if (DECL_CONTEXT (decl) != id->src_fn)
4405 /* Things that weren't in the scope of the function we're inlining
4406 from aren't in the scope we're inlining to, either. */
4408 else if (TREE_STATIC (decl))
4409 /* Function-scoped static variables should stay in the original
4410 function. */
4412 else
4413 /* Ordinary automatic local variables are now in the scope of the
4414 new function. */
4415 DECL_CONTEXT (copy) = id->dst_fn;
4417 return copy;
4420 static tree
4421 copy_decl_to_var (tree decl, copy_body_data *id)
4423 tree copy, type;
4425 gcc_assert (TREE_CODE (decl) == PARM_DECL
4426 || TREE_CODE (decl) == RESULT_DECL);
4428 type = TREE_TYPE (decl);
4430 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4431 VAR_DECL, DECL_NAME (decl), type);
4432 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4433 TREE_READONLY (copy) = TREE_READONLY (decl);
4434 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4435 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4437 return copy_decl_for_dup_finish (id, decl, copy);
4440 /* Like copy_decl_to_var, but create a return slot object instead of a
4441 pointer variable for return by invisible reference. */
4443 static tree
4444 copy_result_decl_to_var (tree decl, copy_body_data *id)
4446 tree copy, type;
4448 gcc_assert (TREE_CODE (decl) == PARM_DECL
4449 || TREE_CODE (decl) == RESULT_DECL);
4451 type = TREE_TYPE (decl);
4452 if (DECL_BY_REFERENCE (decl))
4453 type = TREE_TYPE (type);
4455 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4456 VAR_DECL, DECL_NAME (decl), type);
4457 TREE_READONLY (copy) = TREE_READONLY (decl);
4458 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4459 if (!DECL_BY_REFERENCE (decl))
4461 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4462 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4465 return copy_decl_for_dup_finish (id, decl, copy);
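/* Return by invisible reference arises for aggregates returned in
   memory, as in this hypothetical declaration:

     struct big { int a[64]; };
     struct big make (void);

   There the callee's RESULT_DECL has DECL_BY_REFERENCE set and a
   pointer type; the branch above strips the pointer so the copy
   becomes the return slot object itself. */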
4468 tree
4469 copy_decl_no_change (tree decl, copy_body_data *id)
4471 tree copy;
4473 copy = copy_node (decl);
4475 /* The COPY is not abstract; it will be generated in DST_FN. */
4476 DECL_ABSTRACT (copy) = 0;
4477 lang_hooks.dup_lang_specific_decl (copy);
4479 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4480 been taken; it's for internal bookkeeping in expand_goto_internal. */
4481 if (TREE_CODE (copy) == LABEL_DECL)
4483 TREE_ADDRESSABLE (copy) = 0;
4484 LABEL_DECL_UID (copy) = -1;
4487 return copy_decl_for_dup_finish (id, decl, copy);
4490 static tree
4491 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4493 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4494 return copy_decl_to_var (decl, id);
4495 else
4496 return copy_decl_no_change (decl, id);
4499 /* Return a copy of the function's argument tree. */
4500 static tree
4501 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4502 bitmap args_to_skip, tree *vars)
4504 tree arg, *parg;
4505 tree new_parm = NULL;
4506 int i = 0;
4508 parg = &new_parm;
4510 for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4511 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4513 tree new_tree = remap_decl (arg, id);
4514 lang_hooks.dup_lang_specific_decl (new_tree);
4515 *parg = new_tree;
4516 parg = &TREE_CHAIN (new_tree);
4518 else if (!pointer_map_contains (id->decl_map, arg))
4520 /* Make an equivalent VAR_DECL. If the argument was used
4521 as a temporary variable later in the function, its uses will
4522 be replaced by this local variable. */
4523 tree var = copy_decl_to_var (arg, id);
4524 get_var_ann (var);
4525 add_referenced_var (var);
4526 insert_decl_map (id, arg, var);
4527 /* Declare this new variable. */
4528 TREE_CHAIN (var) = *vars;
4529 *vars = var;
4531 return new_parm;
4534 /* Return a copy of the function's static chain. */
4535 static tree
4536 copy_static_chain (tree static_chain, copy_body_data * id)
4538 tree *chain_copy, *pvar;
4540 chain_copy = &static_chain;
4541 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4543 tree new_tree = remap_decl (*pvar, id);
4544 lang_hooks.dup_lang_specific_decl (new_tree);
4545 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4546 *pvar = new_tree;
4548 return static_chain;
4551 /* Return true if the function is allowed to be versioned.
4552 This is a guard for the versioning functionality. */
4554 bool
4555 tree_versionable_function_p (tree fndecl)
4557 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4558 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
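/* The attribute check gives users an explicit veto, e.g.

     int hash (const char *s) __attribute__ ((noclone));

   while copy_forbidden rejects bodies that cannot be duplicated safely,
   for instance functions that receive a nonlocal goto. */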
4561 /* Delete all unreachable basic blocks and update callgraph.
4562 Doing so is somewhat nontrivial because we need to update all clones and
4563 remove inline functions that become unreachable. */
4565 static bool
4566 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4568 bool changed = false;
4569 basic_block b, next_bb;
4571 find_unreachable_blocks ();
4573 /* Delete all unreachable basic blocks. */
4575 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4577 next_bb = b->next_bb;
4579 if (!(b->flags & BB_REACHABLE))
4581 gimple_stmt_iterator bsi;
4583 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4584 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4586 struct cgraph_edge *e;
4587 struct cgraph_node *node;
4589 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4591 if (!e->inline_failed)
4592 cgraph_remove_node_and_inline_clones (e->callee);
4593 else
4594 cgraph_remove_edge (e);
4596 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4597 && id->dst_node->clones)
4598 for (node = id->dst_node->clones; node != id->dst_node;)
4600 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4602 if (!e->inline_failed)
4603 cgraph_remove_node_and_inline_clones (e->callee);
4604 else
4605 cgraph_remove_edge (e);
4608 if (node->clones)
4609 node = node->clones;
4610 else if (node->next_sibling_clone)
4611 node = node->next_sibling_clone;
4612 else
4614 while (node != id->dst_node && !node->next_sibling_clone)
4615 node = node->clone_of;
4616 if (node != id->dst_node)
4617 node = node->next_sibling_clone;
4621 delete_basic_block (b);
4622 changed = true;
4626 if (changed)
4627 tidy_fallthru_edges ();
4628 #ifdef ENABLE_CHECKING
4629 verify_cgraph_node (id->dst_node);
4630 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4631 && id->dst_node->clones)
4633 struct cgraph_node *node;
4634 for (node = id->dst_node->clones; node != id->dst_node;)
4636 verify_cgraph_node (node);
4638 if (node->clones)
4639 node = node->clones;
4640 else if (node->next_sibling_clone)
4641 node = node->next_sibling_clone;
4642 else
4644 while (node != id->dst_node && !node->next_sibling_clone)
4645 node = node->clone_of;
4646 if (node != id->dst_node)
4647 node = node->next_sibling_clone;
4651 #endif
4652 return changed;
4655 /* Update clone info after duplication. */
4657 static void
4658 update_clone_info (copy_body_data * id)
4660 struct cgraph_node *node;
4661 if (!id->dst_node->clones)
4662 return;
4663 for (node = id->dst_node->clones; node != id->dst_node;)
4665 /* First update replace maps to match the new body. */
4666 if (node->clone.tree_map)
4668 unsigned int i;
4669 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4671 struct ipa_replace_map *replace_info;
4672 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4673 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4674 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4677 if (node->clones)
4678 node = node->clones;
4679 else if (node->next_sibling_clone)
4680 node = node->next_sibling_clone;
4681 else
4683 while (node != id->dst_node && !node->next_sibling_clone)
4684 node = node->clone_of;
4685 if (node != id->dst_node)
4686 node = node->next_sibling_clone;
4691 /* Create a copy of a function's tree.
4692 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4693 of the original function and the new copied function
4694 respectively. In case we want to replace a DECL
4695 tree with another tree while duplicating the function's
4696 body, TREE_MAP represents the mapping between these
4697 trees. If UPDATE_CLONES is set, the call_stmt fields
4698 of edges of clones of the function will be updated. */
4699 void
4700 tree_function_versioning (tree old_decl, tree new_decl,
4701 VEC(ipa_replace_map_p,gc)* tree_map,
4702 bool update_clones, bitmap args_to_skip)
4704 struct cgraph_node *old_version_node;
4705 struct cgraph_node *new_version_node;
4706 copy_body_data id;
4707 tree p;
4708 unsigned i;
4709 struct ipa_replace_map *replace_info;
4710 basic_block old_entry_block, bb;
4711 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4713 tree t_step;
4714 tree old_current_function_decl = current_function_decl;
4715 tree vars = NULL_TREE;
4717 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4718 && TREE_CODE (new_decl) == FUNCTION_DECL);
4719 DECL_POSSIBLY_INLINED (old_decl) = 1;
4721 old_version_node = cgraph_node (old_decl);
4722 new_version_node = cgraph_node (new_decl);
4724 /* Output the inlining info for this abstract function, since it has been
4725 inlined. If we don't do this now, we can lose the information about the
4726 variables in the function when the blocks get blown away as soon as we
4727 remove the cgraph node. */
4728 (*debug_hooks->outlining_inline_function) (old_decl);
4730 DECL_ARTIFICIAL (new_decl) = 1;
4731 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
4733 /* Prepare the data structures for the tree copy. */
4734 memset (&id, 0, sizeof (id));
4736 /* Set up the set of statements to fold after copying. */
4737 id.statements_to_fold = pointer_set_create ();
4739 id.decl_map = pointer_map_create ();
4740 id.debug_map = NULL;
4741 id.src_fn = old_decl;
4742 id.dst_fn = new_decl;
4743 id.src_node = old_version_node;
4744 id.dst_node = new_version_node;
4745 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
4747 id.copy_decl = copy_decl_no_change;
4748 id.transform_call_graph_edges
4749 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
4750 id.transform_new_cfg = true;
4751 id.transform_return_to_modify = false;
4752 id.transform_lang_insert_block = NULL;
4754 current_function_decl = new_decl;
4755 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
4756 (DECL_STRUCT_FUNCTION (old_decl));
4757 initialize_cfun (new_decl, old_decl,
4758 old_entry_block->count,
4759 old_entry_block->frequency);
4760 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
4762 /* Copy the function's static chain. */
4763 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
4764 if (p)
4765 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
4766 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
4767 &id);
4769 /* If there's a tree_map, prepare for substitution. */
4770 if (tree_map)
4771 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
4773 gimple init;
4774 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
4775 if (replace_info->replace_p)
4777 tree op = replace_info->new_tree;
4779 STRIP_NOPS (op);
4781 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
4782 op = TREE_OPERAND (op, 0);
4784 if (TREE_CODE (op) == ADDR_EXPR)
4786 op = TREE_OPERAND (op, 0);
4787 while (handled_component_p (op))
4788 op = TREE_OPERAND (op, 0);
4789 if (TREE_CODE (op) == VAR_DECL)
4790 add_referenced_var (op);
4792 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
4793 init = setup_one_parameter (&id, replace_info->old_tree,
4794 replace_info->new_tree, id.src_fn,
4795 NULL,
4796 &vars);
4797 if (init)
4798 VEC_safe_push (gimple, heap, init_stmts, init);
4801 /* Copy the function's arguments. */
4802 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
4803 DECL_ARGUMENTS (new_decl) =
4804 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
4805 args_to_skip, &vars);
4807 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
4809 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4810 number_blocks (id.dst_fn);
4812 declare_inline_vars (DECL_INITIAL (new_decl), vars);
4814 if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
4815 /* Add local vars. */
4816 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
4817 t_step; t_step = TREE_CHAIN (t_step))
4819 tree var = TREE_VALUE (t_step);
4820 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
4821 cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
4822 else if (!can_be_nonlocal (var, &id))
4823 cfun->local_decls =
4824 tree_cons (NULL_TREE, remap_decl (var, &id),
4825 cfun->local_decls);
4828 /* Copy the function's body. */
4829 copy_body (&id, old_entry_block->count, old_entry_block->frequency,
4830 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
4832 if (DECL_RESULT (old_decl) != NULL_TREE)
4834 tree *res_decl = &DECL_RESULT (old_decl);
4835 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
4836 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
4839 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4840 number_blocks (new_decl);
4842 /* We want to create the BB unconditionally, so that the addition of
4843 debug stmts doesn't affect BB count, which may in the end cause
4844 codegen differences. */
4845 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
4846 while (VEC_length (gimple, init_stmts))
4847 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
4848 update_clone_info (&id);
4850 /* Remap the nonlocal_goto_save_area, if any. */
4851 if (cfun->nonlocal_goto_save_area)
4853 struct walk_stmt_info wi;
4855 memset (&wi, 0, sizeof (wi));
4856 wi.info = &id;
4857 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
4860 /* Clean up. */
4861 pointer_map_destroy (id.decl_map);
4862 if (id.debug_map)
4863 pointer_map_destroy (id.debug_map);
4864 free_dominance_info (CDI_DOMINATORS);
4865 free_dominance_info (CDI_POST_DOMINATORS);
4867 fold_marked_statements (0, id.statements_to_fold);
4868 pointer_set_destroy (id.statements_to_fold);
4869 fold_cond_expr_cond ();
4870 delete_unreachable_blocks_update_callgraph (&id);
4871 update_ssa (TODO_update_ssa);
4872 free_dominance_info (CDI_DOMINATORS);
4873 free_dominance_info (CDI_POST_DOMINATORS);
4875 gcc_assert (!id.debug_stmts);
4876 VEC_free (gimple, heap, init_stmts);
4877 pop_cfun ();
4878 current_function_decl = old_current_function_decl;
4879 gcc_assert (!current_function_decl
4880 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
4881 return;
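/* A sketch of the intent: IPA transformations use this to build
   specialized clones. Given hypothetical user code

     int f (int flag, int x) { return flag ? x : -x; }

   a clone with FLAG replaced by 1 via TREE_MAP, and the now-dead
   parameter listed in ARGS_TO_SKIP, behaves like

     int f_clone (int x) { return x; }

   once fold_marked_statements and the unreachable-block cleanup above
   have run. */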
4884 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
4885 the callee and return the inlined body on success. */
4887 tree
4888 maybe_inline_call_in_expr (tree exp)
4890 tree fn = get_callee_fndecl (exp);
4892 /* We can only try to inline "const" functions. */
4893 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
4895 struct pointer_map_t *decl_map = pointer_map_create ();
4896 call_expr_arg_iterator iter;
4897 copy_body_data id;
4898 tree param, arg, t;
4900 /* Remap the parameters. */
4901 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
4902 param;
4903 param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
4904 *pointer_map_insert (decl_map, param) = arg;
4906 memset (&id, 0, sizeof (id));
4907 id.src_fn = fn;
4908 id.dst_fn = current_function_decl;
4909 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
4910 id.decl_map = decl_map;
4912 id.copy_decl = copy_decl_no_change;
4913 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4914 id.transform_new_cfg = false;
4915 id.transform_return_to_modify = true;
4916 id.transform_lang_insert_block = NULL;
4918 /* Make sure not to unshare trees behind the front-end's back
4919 since front-end specific mechanisms may rely on sharing. */
4920 id.regimplify = false;
4921 id.do_not_unshare = true;
4923 /* We're not inside any EH region. */
4924 id.eh_region = -1;
4926 t = copy_tree_body (&id);
4927 pointer_map_destroy (decl_map);
4929 /* We can only return something suitable for use in a GENERIC
4930 expression tree. */
4931 if (TREE_CODE (t) == MODIFY_EXPR)
4932 return TREE_OPERAND (t, 1);
4935 return NULL_TREE;
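/* For illustration, the kind of callee this accepts (hypothetical user
   code; TREE_READONLY corresponds to the const attribute):

     static int sq (int n) __attribute__ ((const));
     static int sq (int n) { return n * n; }

   A call 'sq (4)' sitting in a GENERIC expression can then be replaced
   by the copied body's MODIFY_EXPR right-hand side, with N remapped to
   the constant argument. */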
4938 /* Duplicate a type, fields and all. */
4940 tree
4941 build_duplicate_type (tree type)
4943 struct copy_body_data id;
4945 memset (&id, 0, sizeof (id));
4946 id.src_fn = current_function_decl;
4947 id.dst_fn = current_function_decl;
4948 id.src_cfun = cfun;
4949 id.decl_map = pointer_map_create ();
4950 id.debug_map = NULL;
4951 id.copy_decl = copy_decl_no_change;
4953 type = remap_type_1 (type, &id);
4955 pointer_map_destroy (id.decl_map);
4956 if (id.debug_map)
4957 pointer_map_destroy (id.debug_map);
4959 TYPE_CANONICAL (type) = type;
4961 return type;
4964 /* Return whether it is safe to inline the callee of call edge E; it is
4965 not if caller and callee use incompatible target-specific options or if
4966 the call site's actual argument types mismatch the parameter types. */
4967 bool
4968 tree_can_inline_p (struct cgraph_edge *e)
4970 #if 0
4971 /* This causes a regression in SPEC in that it prevents a cold function from
4972 inlining a hot function. Perhaps this should only apply to functions
4973 that the user declares hot/cold/optimize explicitly. */
4975 /* Don't inline a function with a higher optimization level than the
4976 caller, or with different space constraints (hot/cold functions). */
4977 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
4978 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
4980 if (caller_tree != callee_tree)
4982 struct cl_optimization *caller_opt
4983 = TREE_OPTIMIZATION ((caller_tree)
4984 ? caller_tree
4985 : optimization_default_node);
4987 struct cl_optimization *callee_opt
4988 = TREE_OPTIMIZATION ((callee_tree)
4989 ? callee_tree
4990 : optimization_default_node);
4992 if ((caller_opt->optimize > callee_opt->optimize)
4993 || (caller_opt->optimize_size != callee_opt->optimize_size))
4994 return false;
4996 #endif
4997 tree caller, callee;
4999 caller = e->caller->decl;
5000 callee = e->callee->decl;
5002 /* Allow the backend to decide if inlining is ok. */
5003 if (!targetm.target_option.can_inline_p (caller, callee))
5005 e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5006 gimple_call_set_cannot_inline (e->call_stmt, true);
5007 return false;
5010 if (!gimple_check_call_args (e->call_stmt))
5012 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5013 gimple_call_set_cannot_inline (e->call_stmt, true);
5014 return false;
5017 return true;
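/* Example of a target-option mismatch on x86 (real attribute syntax,
   hypothetical functions):

     __attribute__ ((target ("sse4.2"))) int fast (int);
     int generic_caller (int x) { return fast (x); }

   Inlining FAST here would let SSE4.2 code escape its guarded caller,
   so targetm.target_option.can_inline_p refuses and the edge is marked
   with CIF_TARGET_OPTION_MISMATCH above. */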