/* Tree inlining.
   Copyright (C) 2001-2018 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "gimple-predict.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "cfganal.h"
#include "tree-iterator.h"
#include "intl.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "debug.h"
#include "params.h"
#include "value-prof.h"
#include "cfgloop.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "sreal.h"
/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inserted into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
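
/* A hedged, source-level sketch of the inlining transformation described
   above (not taken verbatim from GCC's output; all names are hypothetical).
   Given

     int callee (int p) { return p + 1; }
     int caller (void)  { return callee (41); }

   the PARM_DECL p is remapped to a caller-local variable and the
   RETURN_EXPR becomes an assignment to a returned-value variable,
   conceptually:

     int caller (void)
     {
       int p.1 = 41;
       int retval.2;
       retval.2 = p.1 + 1;
       return retval.2;
     }  */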
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;
/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree,
				     basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  id->decl_map->put (key, value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    id->decl_map->put (value, value);
}
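
/* A minimal usage sketch (OLD_DECL/NEW_DECL are hypothetical trees):
   after insert_decl_map (id, old_decl, new_decl) both lookups resolve,

     *id->decl_map->get (old_decl) == new_decl
     *id->decl_map->get (new_decl) == new_decl

   so if NEW_DECL itself is encountered during a later walk it maps to
   itself instead of being copied a second time.  */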
/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (VAR_P (value));

  if (!id->debug_map)
    id->debug_map = new hash_map<tree, tree>;

  id->debug_map->put (key, value);
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;
/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = id->decl_map->get (name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  && id->entry_bb == NULL
	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
	{
	  tree vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple *def_temp;
	  gimple_stmt_iterator gsi;
	  tree val = SSA_NAME_VAR (name);

	  n = id->decl_map->get (val);
	  if (n != NULL)
	    val = *n;
	  if (TREE_CODE (val) != PARM_DECL)
	    {
	      processing_debug_stmt = -1;
	      return name;
	    }
	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (name);
	  SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	  return vexpr;
	}

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
	  && VAR_P (var)
	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
	  && DECL_ARTIFICIAL (var)
	  && DECL_IGNORED_P (var)
	  && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
      if (!var && SSA_NAME_IDENTIFIER (name))
	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && POINTER_TYPE_P (TREE_TYPE (name))
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      return new_tree;
    }

  /* Do not set DEF_STMT yet as statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from the need to introduce a PHI node
     when the return value is only partly initialized.  */
  if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && POINTER_TYPE_P (TREE_TYPE (name))
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	{
	  /* Inlining a function that has an uninitialized variable can
	     extend that variable's lifetime (the variable might get
	     reused).  This causes an ICE if we end up extending the
	     lifetime of an SSA name across an abnormal edge, and it also
	     increases register pressure.

	     We simply initialize all uninitialized vars to 0, except
	     when we are inlining into the very first BB.  We could avoid
	     this for all BBs that are not inside strongly connected
	     regions of the CFG, but this is expensive to test.  */
	  if (id->entry_bb
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
	      && (!SSA_NAME_VAR (name)
		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
					     0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple *init_stmt;
	      tree zero = build_zero_cst (TREE_TYPE (new_tree));

	      init_stmt = gimple_build_assign (new_tree, zero);
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
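
/* An illustrative GIMPLE sketch of the zero-initialization above
   (hypothetical SSA names): if the callee used an uninitialized default
   definition u_2(D) and we are inlining somewhere other than the entry
   block, the copied name gets an explicit initializer in id->entry_bb,

     u_7 = 0;

   so the copy never becomes a default definition whose lifetime could
   be stretched across an abnormal edge in the caller.  */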
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = id->decl_map->get (decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* When remapping a type within copy_gimple_seq_and_replace_locals, all
     necessary DECLs have already been remapped and we do not want to duplicate
     a decl coming from outside of the sequence we are copying.  */
  if (!n
      && id->prevent_decl_creation_for_types
      && id->remapping_type_depth > 0
      && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
    return decl;

  /* If we didn't already have an equivalent for this declaration, create one
     now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	{
	  DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

	  /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
	     which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
	     is not set on the TYPE_DECL, for example in LTO mode.  */
	  if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
	    {
	      tree x = build_variant_type_copy (TREE_TYPE (t));
	      TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
	      TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
	      DECL_ORIGINAL_TYPE (t) = x;
	    }
	}

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
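
/* A hedged usage sketch (hypothetical local decl I): remap_decl is
   memoizing per ID.  The first call copies I via id->copy_decl and
   records the pair; later calls for the same I return the recorded
   copy, unshared unless id->do_not_unshare is set:

     tree c1 = remap_decl (i, id);
     tree c2 = remap_decl (i, id);   // resolves to the same mapping as c1
*/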
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					      TYPE_MODE (type),
					      TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
						TYPE_MODE (type),
						TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  /* Copy all types that may contain references to local variables; be sure to
     preserve sharing in between type and its main variant when possible.  */
  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));

	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	{
	  t = TYPE_MIN_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

	  t = TYPE_MAX_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
	}
      return new_tree;

    case FUNCTION_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
      else
	walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);

      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      if (TYPE_MAIN_VARIANT (type) != type
	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
      else
	{
	  tree f, nf = NULL;

	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
	    {
	      t = remap_decl (f, id);
	      DECL_CONTEXT (t) = new_tree;
	      DECL_CHAIN (t) = nf;
	      nf = t;
	    }
	  TYPE_FIELDS (new_tree) = nreverse (nf);
	}
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  /* All variants of type share the same size, so use the already remapped
     data.  */
  if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
    {
      tree s = TYPE_SIZE (type);
      tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
      tree su = TYPE_SIZE_UNIT (type);
      tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
      gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
			    && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
			   || s == mvs);
      gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
			    && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
			   || su == mvsu);
      TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
      TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
    }
  else
    {
      walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
      walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
    }

  return new_tree;
}
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = id->decl_map->get (type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
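
/* A hedged example of when remapping actually triggers: only variably
   modified types are copied.  For a callee such as

     void callee (int n) { int buf[n]; ... }

   the type of buf references the PARM_DECL n, so inlining must build a
   new array type whose bounds refer to the remapped copy of n, whereas
   a plain 'int[10]' just gets an identity mapping on the fast path
   above.  */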
/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}
static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
	     copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  /* We need to add this variable to the local decls as otherwise
	     nothing else will do so.  */
	  if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
	    add_local_decl (cfun, old_var);
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
	{
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  DECL_CHAIN (new_var) = new_decls;
	  new_decls = new_var;

	  /* Also copy value-expressions.  */
	  if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree tem = DECL_VALUE_EXPR (new_var);
	      bool old_regimplify = id->regimplify;
	      id->remapping_type_depth++;
	      walk_tree (&tem, copy_tree_body_r, id, NULL);
	      id->remapping_type_depth--;
	      id->regimplify = old_regimplify;
	      SET_DECL_VALUE_EXPR (new_var, tem);
	    }
	}
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
/* Copy the whole block tree and root it in id->block.  */

static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}
/* Remap the block tree rooted at BLOCK to nothing.  */

static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}
/* Remap the location info pointed to by LOCUS.  */

static location_t
remap_location (location_t locus, copy_body_data *id)
{
  if (LOCATION_BLOCK (locus))
    {
      tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
      gcc_assert (n);
      if (*n)
	return set_block (locus, *n);
    }

  locus = LOCATION_LOCUS (locus);

  if (locus != UNKNOWN_LOCATION && id->block)
    return set_block (locus, id->block);

  return locus;
}
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
	/* This copy is not redundant; tsi_link_after will smash this
	   STATEMENT_LIST into the end of the one we're building, and we
	   don't want to do that with the original.  */
	copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}
/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_seq (&new_body, new_stmts);
    }

  return new_body;
}
/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple *
copy_gimple_bind (gbind *stmt, copy_body_data *id)
{
  gimple *new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}
/* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */

static bool
is_parm (tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      decl = SSA_NAME_VAR (decl);
      if (!decl)
	return false;
    }

  return (TREE_CODE (decl) == PARM_DECL);
}
/* Remap the dependence CLIQUE from the source to the destination function
   as specified in ID.  */

static unsigned short
remap_dependence_clique (copy_body_data *id, unsigned short clique)
{
  if (clique == 0 || processing_debug_stmt)
    return 0;
  if (!id->dependence_map)
    id->dependence_map = new hash_map<dependence_hash, unsigned short>;
  bool existed;
  unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
  if (!existed)
    newc = ++cfun->last_clique;
  return newc;
}
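
/* A hedged illustration (hypothetical clique numbers): dependence
   cliques are function-local, so a MEM_REF copied from the source
   function with clique 2 must not collide with cliques already in use
   in the destination.  If cfun->last_clique was 5, the first remap of
   clique 2 yields 6, and every later occurrence of clique 2 from the
   same source body maps to that same 6 via id->dependence_map.  */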
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  /* For recursive invocations this is no longer the LHS itself.  */
  bool is_lhs = wi_p->is_lhs;
  wi_p->is_lhs = false;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      if (is_lhs)
	SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
	 any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
	 has already been remapped.  Otherwise, it need not be.  */
      tree *n = id->decl_map->get (*tp);
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
	  recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
	{
	  tree *n;
	  n = id->decl_map->get (TREE_BLOCK (*tp));
	  if (n)
	    new_block = *n;
	}
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
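
/* A sketch of the MEM_REF re-canonicalization above (hypothetical IL):
   if parameter p was bound to &a at the call site, the copied
   dereference

     MEM[(int *)p]     becomes     MEM[(int *)&a]

   and fold_build2 may canonicalize it further to a direct reference to
   'a'.  That folding opportunity is why we recurse into the pointer
   operand manually instead of letting the generic walk copy the
   MEM_REF node blindly.  */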
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = id->decl_map->get (decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n = id->decl_map->get (decl);
	  if (n)
	    {
	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
		 does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (*tp);
	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
	      tree old = *tp;
	      *tp = gimple_fold_indirect_ref (ptr);
	      if (! *tp)
		{
		  type = remap_type (type, id);
		  if (TREE_CODE (ptr) == ADDR_EXPR)
		    {
		      *tp
			= fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
		      /* ???  We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (ptr, 0);
		    }
		  else
		    {
		      *tp = build1 (INDIRECT_REF, type, ptr);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
			 have remapped a parameter as the property might be
			 valid only for the parameter itself.  */
		      if (TREE_THIS_NOTRAP (old)
			  && (!is_parm (TREE_OPERAND (old, 0))
			      || (!id->transform_parameter && is_parm (ptr))))
			TREE_THIS_NOTRAP (*tp) = 1;
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has a block defined, map it to the newly constructed block.
	 When inlining we want EXPRs without a block to appear in the block
	 of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = id->decl_map->get (TREE_BLOCK (*tp));
	      if (n)
		new_block = *n;
	    }
	  TREE_SET_BLOCK (*tp, new_block);
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    {
	      tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	      if (TREE_TYPE (t) != TREE_TYPE (*tp))
		t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
	      *tp = t;
	    }
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
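
/* A hedged example of the INDIRECT_REF cleanup above: when a pointer
   parameter was bound to an ADDR_EXPR at the call site, the inlined
   body may briefly contain

     *&var

   which gimple_fold_indirect_ref (or the manual ADDR_EXPR stripping
   fallback) reduces to a plain reference to 'var' before the copy is
   emitted into the caller.  */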
/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  new_r = static_cast<eh_region> (*id->eh_map->get (old_r));

  return new_r->index;
}
/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_to_shwi (old_t_nr);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}
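
/* An illustration with hypothetical region numbers: after the callee's
   EH tree has been duplicated into the caller, a copied call such as

     __builtin_eh_pointer (1)

   must be rewritten to name the duplicate region, e.g.

     __builtin_eh_pointer (4)

   which is exactly the INTEGER_CST rewrite this helper performs using
   id->eh_map.  */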
/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple_seq
remap_gimple_stmt (gimple *stmt, copy_body_data *id)
{
  gimple *copy = NULL;
  struct walk_stmt_info wi;
  bool skip_first = false;
  gimple_seq stmts = NULL;

  if (is_gimple_debug (stmt)
      && (gimple_debug_nonbind_marker_p (stmt)
	  ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
	  : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
    return NULL;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURN should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (as_a <greturn *> (stmt));

      /* If we're returning something, just turn that into an
	 assignment to the equivalent of the original RESULT_DECL.
	 If RETVAL is just the result decl, the result decl has
	 already been set (e.g. a recent "foo (&result_decl, ...)");
	 just toss the entire GIMPLE_RETURN.  */
      if (retval
	  && (TREE_CODE (retval) != RESULT_DECL
	      && (TREE_CODE (retval) != SSA_NAME
		  || ! SSA_NAME_VAR (retval)
		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
	{
	  copy = gimple_build_assign (id->do_not_unshare
				      ? id->retvar : unshare_expr (id->retvar),
				      retval);
	  /* id->retvar is already substituted.  Skip it on later remapping.  */
	  skip_first = true;
	}
      else
	return NULL;
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
	 have embedded statements.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
	  break;

	case GIMPLE_CATCH:
	  {
	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
	  }
	  break;

	case GIMPLE_EH_FILTER:
	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
	  break;

	case GIMPLE_TRY:
	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
	  copy = gimple_build_wce (s1);
	  break;

	case GIMPLE_OMP_PARALLEL:
	  {
	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
	    copy = gimple_build_omp_parallel
		     (s1,
		      gimple_omp_parallel_clauses (omp_par_stmt),
		      gimple_omp_parallel_child_fn (omp_par_stmt),
		      gimple_omp_parallel_data_arg (omp_par_stmt));
	  }
	  break;

	case GIMPLE_OMP_TASK:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_task
		   (s1,
		    gimple_omp_task_clauses (stmt),
		    gimple_omp_task_child_fn (stmt),
		    gimple_omp_task_data_arg (stmt),
		    gimple_omp_task_copy_fn (stmt),
		    gimple_omp_task_arg_size (stmt),
		    gimple_omp_task_arg_align (stmt));
	  break;

	case GIMPLE_OMP_FOR:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
				       gimple_omp_for_clauses (stmt),
				       gimple_omp_for_collapse (stmt), s2);
	  {
	    size_t i;
	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	      {
		gimple_omp_for_set_index (copy, i,
					  gimple_omp_for_index (stmt, i));
		gimple_omp_for_set_initial (copy, i,
					    gimple_omp_for_initial (stmt, i));
		gimple_omp_for_set_final (copy, i,
					  gimple_omp_for_final (stmt, i));
		gimple_omp_for_set_incr (copy, i,
					 gimple_omp_for_incr (stmt, i));
		gimple_omp_for_set_cond (copy, i,
					 gimple_omp_for_cond (stmt, i));
	      }
	  }
	  break;

	case GIMPLE_OMP_MASTER:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_master (s1);
	  break;

	case GIMPLE_OMP_TASKGROUP:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_taskgroup (s1);
	  break;

	case GIMPLE_OMP_ORDERED:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_ordered
		   (s1,
		    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
	  break;

	case GIMPLE_OMP_SECTION:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_section (s1);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_sections
		   (s1, gimple_omp_sections_clauses (stmt));
	  break;

	case GIMPLE_OMP_SINGLE:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_single
		   (s1, gimple_omp_single_clauses (stmt));
	  break;

	case GIMPLE_OMP_TARGET:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_target
		   (s1, gimple_omp_target_kind (stmt),
		    gimple_omp_target_clauses (stmt));
	  break;

	case GIMPLE_OMP_TEAMS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_teams
		   (s1, gimple_omp_teams_clauses (stmt));
	  break;

	case GIMPLE_OMP_CRITICAL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_critical (s1,
					    gimple_omp_critical_name
					      (as_a <gomp_critical *> (stmt)),
					    gimple_omp_critical_clauses
					      (as_a <gomp_critical *> (stmt)));
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
	    gtransaction *new_trans_stmt;
	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
				   id);
	    copy = new_trans_stmt = gimple_build_transaction (s1);
	    gimple_transaction_set_subcode (new_trans_stmt,
	      gimple_transaction_subcode (old_trans_stmt));
	    gimple_transaction_set_label_norm (new_trans_stmt,
	      gimple_transaction_label_norm (old_trans_stmt));
	    gimple_transaction_set_label_uninst (new_trans_stmt,
	      gimple_transaction_label_uninst (old_trans_stmt));
	    gimple_transaction_set_label_over (new_trans_stmt,
	      gimple_transaction_label_over (old_trans_stmt));
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
	{
	  /* Here we handle statements that are not completely rewritten.
	     First we detect some inlining-induced bogosities for
	     discarding.  */

	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = gimple_assign_lhs (stmt), value;
	  tree *n;

	  n = id->decl_map->get (decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		return NULL;
	    }
	}

      /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
	 in a block that we aren't copying during tree_function_versioning,
	 just drop the clobber stmt.  */
      if (id->blocks_to_copy && gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
	      if (gimple_bb (def_stmt)
		  && !bitmap_bit_p (id->blocks_to_copy,
				    gimple_bb (def_stmt)->index))
		return NULL;
	    }
	}

      if (gimple_debug_bind_p (stmt))
	{
	  gdebug *copy
	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
				       gimple_debug_bind_get_value (stmt),
				       stmt);
	  if (id->reset_location)
	    gimple_set_location (copy, input_location);
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt (&stmts, copy);
	  return stmts;
	}
      if (gimple_debug_source_bind_p (stmt))
	{
	  gdebug *copy = gimple_build_debug_source_bind
			   (gimple_debug_source_bind_get_var (stmt),
			    gimple_debug_source_bind_get_value (stmt),
			    stmt);
	  if (id->reset_location)
	    gimple_set_location (copy, input_location);
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt (&stmts, copy);
	  return stmts;
	}
      if (gimple_debug_nonbind_marker_p (stmt))
	{
	  /* If the inlined function has too many debug markers,
	     don't copy them.  */
	  if (id->src_cfun->debug_marker_count
	      > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
	    return stmts;

	  gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
	  if (id->reset_location)
	    gimple_set_location (copy, input_location);
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt (&stmts, copy);
	  return stmts;
	}

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Clear flags that need revisiting.  */
      if (gcall *call_stmt = dyn_cast <gcall *> (copy))
	{
	  if (gimple_call_tail_p (call_stmt))
	    gimple_call_set_tail (call_stmt, false);
	  if (gimple_call_from_thunk_p (call_stmt))
	    gimple_call_set_from_thunk (call_stmt, false);
	  if (gimple_call_internal_p (call_stmt))
	    switch (gimple_call_internal_fn (call_stmt))
	      {
	      case IFN_GOMP_SIMD_LANE:
	      case IFN_GOMP_SIMD_VF:
	      case IFN_GOMP_SIMD_LAST_LANE:
	      case IFN_GOMP_SIMD_ORDERED_START:
	      case IFN_GOMP_SIMD_ORDERED_END:
		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
		break;
	      default:
		break;
	      }
	}

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
	 RESX and EH_DISPATCH.  */
      if (id->eh_map)
	switch (gimple_code (copy))
	  {
	  case GIMPLE_CALL:
	    {
	      tree r, fndecl = gimple_call_fndecl (copy);
	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
		switch (DECL_FUNCTION_CODE (fndecl))
		  {
		  case BUILT_IN_EH_COPY_VALUES:
		    r = gimple_call_arg (copy, 1);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 1, r);
		    /* FALLTHRU */

		  case BUILT_IN_EH_POINTER:
		  case BUILT_IN_EH_FILTER:
		    r = gimple_call_arg (copy, 0);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 0, r);
		    break;

		  default:
		    break;
		  }

	      /* Reset alias info if we didn't apply measures to
		 keep it valid over inlining by setting DECL_PT_UID.  */
	      if (!id->src_cfun->gimple_df
		  || !id->src_cfun->gimple_df->ipa_pta)
		gimple_call_reset_alias_info (as_a <gcall *> (copy));
	    }
	    break;

	  case GIMPLE_RESX:
	    {
	      gresx *resx_stmt = as_a <gresx *> (copy);
	      int r = gimple_resx_region (resx_stmt);
	      r = remap_eh_region_nr (r, id);
	      gimple_resx_set_region (resx_stmt, r);
	    }
	    break;

	  case GIMPLE_EH_DISPATCH:
	    {
	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
	      int r = gimple_eh_dispatch_region (eh_dispatch);
	      r = remap_eh_region_nr (r, id);
	      gimple_eh_dispatch_set_region (eh_dispatch, r);
	    }
	    break;

	  default:
	    break;
	  }
    }

  /* If STMT has a block defined, map it to the newly constructed block.  */
  if (gimple_block (copy))
    {
      tree *n;
      n = id->decl_map->get (gimple_block (copy));
      gcc_assert (n);
      gimple_set_block (copy, *n);
    }

  if (id->reset_location)
    gimple_set_location (copy, input_location);

  /* Debug statements ought to be rebuilt and not copied.  */
  gcc_checking_assert (!is_gimple_debug (copy));

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  gimple_seq_add_stmt (&stmts, copy);
  return stmts;
}
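
/* A hedged sketch of the GIMPLE_RETURN rewrite above (hypothetical SSA
   names): when inlining a callee statement

     return x_1;

   with id->retvar bound to retval.5, this function emits

     retval.5 = x_1;

   and the branch semantics of the return are wired up later when the
   CFG is copied, not here.  */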
1787 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1788 later */
1790 static basic_block
1791 copy_bb (copy_body_data *id, basic_block bb,
1792 profile_count num, profile_count den)
1794 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1795 basic_block copy_basic_block;
1796 tree decl;
1797 basic_block prev;
1799 profile_count::adjust_for_ipa_scaling (&num, &den);
1801 /* Search for previous copied basic block. */
1802 prev = bb->prev_bb;
1803 while (!prev->aux)
1804 prev = prev->prev_bb;
1806 /* create_basic_block() will append every new block to
1807 basic_block_info automatically. */
1808 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1809 copy_basic_block->count = bb->count.apply_scale (num, den);
1811 copy_gsi = gsi_start_bb (copy_basic_block);
1813 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1815 gimple_seq stmts;
1816 gimple *stmt = gsi_stmt (gsi);
1817 gimple *orig_stmt = stmt;
1818 gimple_stmt_iterator stmts_gsi;
1819 bool stmt_added = false;
1821 id->regimplify = false;
1822 stmts = remap_gimple_stmt (stmt, id);
1824 if (gimple_seq_empty_p (stmts))
1825 continue;
1827 seq_gsi = copy_gsi;
1829 for (stmts_gsi = gsi_start (stmts);
1830 !gsi_end_p (stmts_gsi); )
1832 stmt = gsi_stmt (stmts_gsi);
1834 /* Advance iterator now before stmt is moved to seq_gsi. */
1835 gsi_next (&stmts_gsi);
1837 if (gimple_nop_p (stmt))
1838 continue;
1840 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1841 orig_stmt);
1843 /* With return slot optimization we can end up with
1844 non-gimple (foo *)&this->m, fix that here. */
1845 if (is_gimple_assign (stmt)
1846 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1847 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1849 tree new_rhs;
1850 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1851 gimple_assign_rhs1 (stmt),
1852 true, NULL, false,
1853 GSI_CONTINUE_LINKING);
1854 gimple_assign_set_rhs1 (stmt, new_rhs);
1855 id->regimplify = false;
1858 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1860 if (id->regimplify)
1861 gimple_regimplify_operands (stmt, &seq_gsi);
1863 stmt_added = true;
1866 if (!stmt_added)
1867 continue;
1869 /* If copy_basic_block has been empty at the start of this iteration,
1870 call gsi_start_bb again to get at the newly added statements. */
1871 if (gsi_end_p (copy_gsi))
1872 copy_gsi = gsi_start_bb (copy_basic_block);
1873 else
1874 gsi_next (&copy_gsi);
1876 /* Process the new statement. The call to gimple_regimplify_operands
1877 possibly turned the statement into multiple statements; we
1878 need to process all of them. */
1881 tree fn;
1882 gcall *call_stmt;
1884 stmt = gsi_stmt (copy_gsi);
1885 call_stmt = dyn_cast <gcall *> (stmt);
1886 if (call_stmt
1887 && gimple_call_va_arg_pack_p (call_stmt)
1888 && id->call_stmt
1889 && ! gimple_call_va_arg_pack_p (id->call_stmt))
1891 /* __builtin_va_arg_pack () should be replaced by
1892 all arguments corresponding to ... in the caller. */
1893 tree p;
1894 gcall *new_call;
1895 vec<tree> argarray;
1896 size_t nargs = gimple_call_num_args (id->call_stmt);
1897 size_t n;
1899 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1900 nargs--;
1902 /* Create the new array of arguments. */
1903 n = nargs + gimple_call_num_args (call_stmt);
1904 argarray.create (n);
1905 argarray.safe_grow_cleared (n);
1907 /* Copy all the arguments before '...' */
1908 memcpy (argarray.address (),
1909 gimple_call_arg_ptr (call_stmt, 0),
1910 gimple_call_num_args (call_stmt) * sizeof (tree));
1912 /* Append the arguments passed in '...' */
1913 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1914 gimple_call_arg_ptr (id->call_stmt, 0)
1915 + (gimple_call_num_args (id->call_stmt) - nargs),
1916 nargs * sizeof (tree));
1918 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1919 argarray);
1921 argarray.release ();
1923 /* Copy all GIMPLE_CALL flags, location and block, except
1924 GF_CALL_VA_ARG_PACK. */
1925 gimple_call_copy_flags (new_call, call_stmt);
1926 gimple_call_set_va_arg_pack (new_call, false);
1927 gimple_set_location (new_call, gimple_location (stmt));
1928 gimple_set_block (new_call, gimple_block (stmt));
1929 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1931 gsi_replace (&copy_gsi, new_call, false);
1932 stmt = new_call;
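/* Illustrative sketch, not part of GCC; my_printf and log_it are
   hypothetical names.  __builtin_va_arg_pack is intended for
   always_inline varargs wrappers such as

     extern int my_printf (const char *fmt, ...);
     static inline int __attribute__ ((always_inline))
     log_it (const char *fmt, ...)
     {
       return my_printf (fmt, __builtin_va_arg_pack ());
     }

   After inlining a call log_it ("%d %s", i, s), the code above splices
   the caller's anonymous arguments into the copied call, producing
   my_printf ("%d %s", i, s).  */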
1934 else if (call_stmt
1935 && id->call_stmt
1936 && (decl = gimple_call_fndecl (stmt))
1937 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1938 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN
1939 && ! gimple_call_va_arg_pack_p (id->call_stmt))
1941 /* __builtin_va_arg_pack_len () should be replaced by
1942 the number of anonymous arguments. */
1943 size_t nargs = gimple_call_num_args (id->call_stmt);
1944 tree count, p;
1945 gimple *new_stmt;
1947 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1948 nargs--;
1950 count = build_int_cst (integer_type_node, nargs);
1951 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1952 gsi_replace (&copy_gsi, new_stmt, false);
1953 stmt = new_stmt;
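/* Illustrative sketch, not part of GCC: inside the hypothetical log_it
   wrapper above, an expression such as

     int n = __builtin_va_arg_pack_len ();

   folds, after inlining the call log_it ("%d %s", i, s), to the
   constant 2, the number of anonymous arguments at that call site,
   via the assignment built here.  */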
1955 else if (call_stmt
1956 && id->call_stmt
1957 && gimple_call_internal_p (stmt)
1958 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1960 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1961 gsi_remove (&copy_gsi, false);
1962 continue;
1965 /* Statements produced by inlining can be unfolded, especially
1966 when we constant propagated some operands. We can't fold
1967 them right now for two reasons:
1968 1) folding requires SSA_NAME_DEF_STMTs to be correct,
1969 2) we can't change function calls to builtins.
1970 So we just mark the statement for later folding.  We mark
1971 all new statements, instead of just the ones changed by some
1972 nontrivial substitution, so that even statements made
1973 foldable indirectly are updated. If this turns out to be
1974 expensive, copy_body can be told to watch for nontrivial
1975 changes. */
1976 if (id->statements_to_fold)
1977 id->statements_to_fold->add (stmt);
1979 /* We're duplicating a CALL_EXPR. Find any corresponding
1980 callgraph edges and update or duplicate them. */
1981 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1983 struct cgraph_edge *edge;
1985 switch (id->transform_call_graph_edges)
1987 case CB_CGE_DUPLICATE:
1988 edge = id->src_node->get_edge (orig_stmt);
1989 if (edge)
1991 struct cgraph_edge *old_edge = edge;
1992 profile_count old_cnt = edge->count;
1993 edge = edge->clone (id->dst_node, call_stmt,
1994 gimple_uid (stmt),
1995 num, den,
1996 true);
1998 /* Speculative calls consist of two edges - direct and
1999 indirect. Duplicate the whole thing and distribute
2000 frequencies accordingly. */
2001 if (edge->speculative)
2003 struct cgraph_edge *direct, *indirect;
2004 struct ipa_ref *ref;
2006 gcc_assert (!edge->indirect_unknown_callee);
2007 old_edge->speculative_call_info (direct, indirect, ref);
2009 profile_count indir_cnt = indirect->count;
2010 indirect = indirect->clone (id->dst_node, call_stmt,
2011 gimple_uid (stmt),
2012 num, den,
2013 true);
2015 profile_probability prob
2016 = indir_cnt.probability_in (old_cnt + indir_cnt);
2017 indirect->count
2018 = copy_basic_block->count.apply_probability (prob);
2019 edge->count = copy_basic_block->count - indirect->count;
2020 id->dst_node->clone_reference (ref, stmt);
2022 else
2023 edge->count = copy_basic_block->count;
2025 break;
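/* Worked example with made-up counts: if the original speculative
   call had a direct edge of count 90 and an indirect edge of count
   10, then prob = 10 / (90 + 10) = 10%.  A copied block with count
   40 thus gets an indirect edge of count 4 and a direct edge of
   count 36.  */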
2027 case CB_CGE_MOVE_CLONES:
2028 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2029 call_stmt);
2030 edge = id->dst_node->get_edge (stmt);
2031 break;
2033 case CB_CGE_MOVE:
2034 edge = id->dst_node->get_edge (orig_stmt);
2035 if (edge)
2036 edge->set_call_stmt (call_stmt);
2037 break;
2039 default:
2040 gcc_unreachable ();
2043 /* Constant propagation on arguments done during inlining
2044 may create a new direct call.  Produce an edge for it.  */
2045 if ((!edge
2046 || (edge->indirect_inlining_edge
2047 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2048 && id->dst_node->definition
2049 && (fn = gimple_call_fndecl (stmt)) != NULL)
2051 struct cgraph_node *dest = cgraph_node::get_create (fn);
2053 /* We have a missing edge in the callgraph.  This can happen
2054 when previous inlining turned an indirect call into a
2055 direct call by constant propagating arguments or when we are
2056 producing a dead clone (for further cloning).  In all
2057 other cases we hit a bug (incorrect node sharing is the
2058 most common reason for missing edges). */
2059 gcc_assert (!dest->definition
2060 || dest->address_taken
2061 || !id->src_node->definition
2062 || !id->dst_node->definition);
2063 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2064 id->dst_node->create_edge_including_clones
2065 (dest, orig_stmt, call_stmt, bb->count,
2066 CIF_ORIGINALLY_INDIRECT_CALL);
2067 else
2068 id->dst_node->create_edge (dest, call_stmt,
2069 bb->count)->inline_failed
2070 = CIF_ORIGINALLY_INDIRECT_CALL;
2071 if (dump_file)
2073 fprintf (dump_file, "Created new direct edge to %s\n",
2074 dest->name ());
2078 notice_special_calls (as_a <gcall *> (stmt));
2081 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2082 id->eh_map, id->eh_lp_nr);
2084 gsi_next (&copy_gsi);
2086 while (!gsi_end_p (copy_gsi));
2088 copy_gsi = gsi_last_bb (copy_basic_block);
2091 return copy_basic_block;
2094 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2095 form is quite easy, since the dominator relationship for the old basic blocks
2096 does not change.
2098 There is, however, an exception: inlining might change the dominator relation
2099 across EH edges from basic blocks within the inlined function
2100 to landing pads in the function we inline into.
2102 The function fills in PHI_RESULTs of such PHI nodes if they refer
2103 to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
2104 PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
2105 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2106 set, and this means that there will be no overlapping live ranges
2107 for the underlying symbol.
2109 This might change in the future if we allow redirecting of EH edges;
2110 we might then want to change the way we build the CFG pre-inlining
2111 to include all the possible edges.  */
2112 static void
2113 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2114 bool can_throw, bool nonlocal_goto)
2116 edge e;
2117 edge_iterator ei;
2119 FOR_EACH_EDGE (e, ei, bb->succs)
2120 if (!e->dest->aux
2121 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2123 gphi *phi;
2124 gphi_iterator si;
2126 if (!nonlocal_goto)
2127 gcc_assert (e->flags & EDGE_EH);
2129 if (!can_throw)
2130 gcc_assert (!(e->flags & EDGE_EH));
2132 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2134 edge re;
2136 phi = si.phi ();
2138 /* For abnormal goto/call edges the receiver can be the
2139 ENTRY_BLOCK. Do not assert this cannot happen. */
2141 gcc_assert ((e->flags & EDGE_EH)
2142 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2144 re = find_edge (ret_bb, e->dest);
2145 gcc_checking_assert (re);
2146 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2147 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2149 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2150 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
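/* Illustrative sketch with hypothetical SSA names: if a landing pad in
   the caller had

     x_1 = PHI <x_0 (ret_bb)>

   and inlining added a new EH edge E from a copied block to it, the
   SET_USE above copies ret_bb's argument into E's slot, yielding

     x_1 = PHI <x_0 (ret_bb), x_0 (E)>  */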
2156 /* Copy edges from BB into its copy constructed earlier, scale profile
2157 accordingly.  Edges will be taken care of later.  Assume aux
2158 pointers point to the copies of each BB.  Return true if any
2159 debug stmts are left after a statement that must end the basic block. */
2161 static bool
2162 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2163 basic_block ret_bb, basic_block abnormal_goto_dest,
2164 copy_body_data *id)
2166 basic_block new_bb = (basic_block) bb->aux;
2167 edge_iterator ei;
2168 edge old_edge;
2169 gimple_stmt_iterator si;
2170 bool need_debug_cleanup = false;
2172 /* Use the indices from the original blocks to create edges for the
2173 new ones. */
2174 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2175 if (!(old_edge->flags & EDGE_EH))
2177 edge new_edge;
2178 int flags = old_edge->flags;
2179 location_t locus = old_edge->goto_locus;
2181 /* Return edges do get a FALLTHRU flag when they get inlined. */
2182 if (old_edge->dest->index == EXIT_BLOCK
2183 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2184 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2185 flags |= EDGE_FALLTHRU;
2187 new_edge
2188 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2189 new_edge->probability = old_edge->probability;
2190 if (!id->reset_location)
2191 new_edge->goto_locus = remap_location (locus, id);
2194 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2195 return false;
2197 /* When doing function splitting, we must decrease the count of the return
2198 block, which was previously reachable from blocks we did not copy.  */
2199 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2200 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2201 if (old_edge->src->index != ENTRY_BLOCK
2202 && !old_edge->src->aux)
2203 new_bb->count -= old_edge->count ().apply_scale (num, den);
2205 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2207 gimple *copy_stmt;
2208 bool can_throw, nonlocal_goto;
2210 copy_stmt = gsi_stmt (si);
2211 if (!is_gimple_debug (copy_stmt))
2212 update_stmt (copy_stmt);
2214 /* Do this before the possible split_block. */
2215 gsi_next (&si);
2217 /* If this tree could throw an exception, there are two
2218 cases where we need to add abnormal edge(s): the
2219 tree wasn't in a region and there is a "current
2220 region" in the caller; or the original tree had
2221 EH edges. In both cases split the block after the tree,
2222 and add abnormal edge(s) as needed; we need both
2223 those from the callee and the caller.
2224 We check whether the copy can throw, because the const
2225 propagation can change an INDIRECT_REF which throws
2226 into a COMPONENT_REF which doesn't. If the copy
2227 can throw, the original could also throw. */
2228 can_throw = stmt_can_throw_internal (copy_stmt);
2229 nonlocal_goto
2230 = (stmt_can_make_abnormal_goto (copy_stmt)
2231 && !computed_goto_p (copy_stmt));
2233 if (can_throw || nonlocal_goto)
2235 if (!gsi_end_p (si))
2237 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2238 gsi_next (&si);
2239 if (gsi_end_p (si))
2240 need_debug_cleanup = true;
2242 if (!gsi_end_p (si))
2243 /* Note that bb's predecessor edges aren't necessarily
2244 right at this point; split_block doesn't care. */
2246 edge e = split_block (new_bb, copy_stmt);
2248 new_bb = e->dest;
2249 new_bb->aux = e->src->aux;
2250 si = gsi_start_bb (new_bb);
2254 bool update_probs = false;
2256 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2258 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2259 update_probs = true;
2261 else if (can_throw)
2263 make_eh_edges (copy_stmt);
2264 update_probs = true;
2267 /* EH edges may not match old edges. Copy as much as possible. */
2268 if (update_probs)
2270 edge e;
2271 edge_iterator ei;
2272 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2274 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2275 if ((old_edge->flags & EDGE_EH)
2276 && (e = find_edge (copy_stmt_bb,
2277 (basic_block) old_edge->dest->aux))
2278 && (e->flags & EDGE_EH))
2279 e->probability = old_edge->probability;
2281 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2282 if ((e->flags & EDGE_EH) && !e->probability.initialized_p ())
2283 e->probability = profile_probability::never ();
2287 /* If the call we inline cannot make an abnormal goto, do not add
2288 additional abnormal edges; only retain those already present
2289 in the original function body.  */
2290 if (abnormal_goto_dest == NULL)
2291 nonlocal_goto = false;
2292 if (nonlocal_goto)
2294 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2296 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2297 nonlocal_goto = false;
2298 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2299 in OpenMP regions which aren't allowed to be left abnormally.
2300 So, no need to add abnormal edge in that case. */
2301 else if (is_gimple_call (copy_stmt)
2302 && gimple_call_internal_p (copy_stmt)
2303 && (gimple_call_internal_fn (copy_stmt)
2304 == IFN_ABNORMAL_DISPATCHER)
2305 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2306 nonlocal_goto = false;
2307 else
2308 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2309 EDGE_ABNORMAL);
2312 if ((can_throw || nonlocal_goto)
2313 && gimple_in_ssa_p (cfun))
2314 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2315 can_throw, nonlocal_goto);
2317 return need_debug_cleanup;
2320 /* Copy the PHIs.  All blocks and edges are copied, some blocks
2321 were possibly split and new outgoing EH edges inserted.
2322 BB points to the block of the original function and AUX pointers link
2323 the original and newly copied blocks.  */
2325 static void
2326 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2328 basic_block const new_bb = (basic_block) bb->aux;
2329 edge_iterator ei;
2330 gphi *phi;
2331 gphi_iterator si;
2332 edge new_edge;
2333 bool inserted = false;
2335 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2337 tree res, new_res;
2338 gphi *new_phi;
2340 phi = si.phi ();
2341 res = PHI_RESULT (phi);
2342 new_res = res;
2343 if (!virtual_operand_p (res))
2345 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2346 if (EDGE_COUNT (new_bb->preds) == 0)
2348 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2349 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2351 else
2353 new_phi = create_phi_node (new_res, new_bb);
2354 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2356 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2357 bb);
2358 tree arg;
2359 tree new_arg;
2360 edge_iterator ei2;
2361 location_t locus;
2363 /* When doing partial cloning, we allow PHIs on the entry
2364 block as long as all the arguments are the same.
2365 Find any input edge to see the argument to copy.  */
2366 if (!old_edge)
2367 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2368 if (!old_edge->src->aux)
2369 break;
2371 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2372 new_arg = arg;
2373 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2374 gcc_assert (new_arg);
2375 /* With return slot optimization we can end up with
2376 non-gimple (foo *)&this->m, fix that here. */
2377 if (TREE_CODE (new_arg) != SSA_NAME
2378 && TREE_CODE (new_arg) != FUNCTION_DECL
2379 && !is_gimple_val (new_arg))
2381 gimple_seq stmts = NULL;
2382 new_arg = force_gimple_operand (new_arg, &stmts, true,
2383 NULL);
2384 gsi_insert_seq_on_edge (new_edge, stmts);
2385 inserted = true;
2387 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2388 if (id->reset_location)
2389 locus = input_location;
2390 else
2391 locus = remap_location (locus, id);
2392 add_phi_arg (new_phi, new_arg, new_edge, locus);
2398 /* Commit the delayed edge insertions. */
2399 if (inserted)
2400 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2401 gsi_commit_one_edge_insert (new_edge, NULL);
2405 /* Wrapper for remap_decl so it can be used as a callback. */
2407 static tree
2408 remap_decl_1 (tree decl, void *data)
2410 return remap_decl (decl, (copy_body_data *) data);
2413 /* Build the struct function and associated datastructures for the new clone
2414 NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  The function changes
2415 cfun to the function of NEW_FNDECL (and current_function_decl too).  */
2417 static void
2418 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2420 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2422 if (!DECL_ARGUMENTS (new_fndecl))
2423 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2424 if (!DECL_RESULT (new_fndecl))
2425 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2427 /* Register specific tree functions. */
2428 gimple_register_cfg_hooks ();
2430 /* Get clean struct function. */
2431 push_struct_function (new_fndecl);
2433 /* We will rebuild these, so just sanity check that they are empty. */
2434 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2435 gcc_assert (cfun->local_decls == NULL);
2436 gcc_assert (cfun->cfg == NULL);
2437 gcc_assert (cfun->decl == new_fndecl);
2439 /* Copy items we preserve during cloning. */
2440 cfun->static_chain_decl = src_cfun->static_chain_decl;
2441 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2442 cfun->function_end_locus = src_cfun->function_end_locus;
2443 cfun->curr_properties = src_cfun->curr_properties;
2444 cfun->last_verified = src_cfun->last_verified;
2445 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2446 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2447 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2448 cfun->stdarg = src_cfun->stdarg;
2449 cfun->after_inlining = src_cfun->after_inlining;
2450 cfun->can_throw_non_call_exceptions
2451 = src_cfun->can_throw_non_call_exceptions;
2452 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2453 cfun->returns_struct = src_cfun->returns_struct;
2454 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2456 init_empty_tree_cfg ();
2458 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2460 profile_count num = count;
2461 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2462 profile_count::adjust_for_ipa_scaling (&num, &den);
2464 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2465 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2466 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2467 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2468 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2469 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2470 if (src_cfun->eh)
2471 init_eh_for_function ();
2473 if (src_cfun->gimple_df)
2475 init_tree_ssa (cfun);
2476 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2477 if (cfun->gimple_df->in_ssa_p)
2478 init_ssa_operands (cfun);
2482 /* Helper function for copy_cfg_body. Move debug stmts from the end
2483 of NEW_BB to the beginning of successor basic blocks when needed. If the
2484 successor has multiple predecessors, reset them, otherwise keep
2485 their value. */
2487 static void
2488 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2490 edge e;
2491 edge_iterator ei;
2492 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2494 if (gsi_end_p (si)
2495 || gsi_one_before_end_p (si)
2496 || !(stmt_can_throw_internal (gsi_stmt (si))
2497 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2498 return;
2500 FOR_EACH_EDGE (e, ei, new_bb->succs)
2502 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2503 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2504 while (is_gimple_debug (gsi_stmt (ssi)))
2506 gimple *stmt = gsi_stmt (ssi);
2507 gdebug *new_stmt;
2508 tree var;
2509 tree value;
2511 /* For the last edge move the debug stmts instead of copying
2512 them. */
2513 if (ei_one_before_end_p (ei))
2515 si = ssi;
2516 gsi_prev (&ssi);
2517 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2519 gimple_debug_bind_reset_value (stmt);
2520 gimple_set_location (stmt, UNKNOWN_LOCATION);
2522 gsi_remove (&si, false);
2523 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2524 continue;
2527 if (gimple_debug_bind_p (stmt))
2529 var = gimple_debug_bind_get_var (stmt);
2530 if (single_pred_p (e->dest))
2532 value = gimple_debug_bind_get_value (stmt);
2533 value = unshare_expr (value);
2534 new_stmt = gimple_build_debug_bind (var, value, stmt);
2536 else
2537 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2539 else if (gimple_debug_source_bind_p (stmt))
2541 var = gimple_debug_source_bind_get_var (stmt);
2542 value = gimple_debug_source_bind_get_value (stmt);
2543 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2545 else if (gimple_debug_nonbind_marker_p (stmt))
2546 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2547 else
2548 gcc_unreachable ();
2549 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2550 id->debug_stmts.safe_push (new_stmt);
2551 gsi_prev (&ssi);
2556 /* Make a copy of the sub-loops of SRC_PARENT and place them
2557 as children of DEST_PARENT.  */
2559 static void
2560 copy_loops (copy_body_data *id,
2561 struct loop *dest_parent, struct loop *src_parent)
2563 struct loop *src_loop = src_parent->inner;
2564 while (src_loop)
2566 if (!id->blocks_to_copy
2567 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2569 struct loop *dest_loop = alloc_loop ();
2571 /* Assign the new loop its header and latch and associate
2572 those with the new loop. */
2573 dest_loop->header = (basic_block)src_loop->header->aux;
2574 dest_loop->header->loop_father = dest_loop;
2575 if (src_loop->latch != NULL)
2577 dest_loop->latch = (basic_block)src_loop->latch->aux;
2578 dest_loop->latch->loop_father = dest_loop;
2581 /* Copy loop meta-data. */
2582 copy_loop_info (src_loop, dest_loop);
2584 /* Finally place it into the loop array and the loop tree. */
2585 place_new_loop (cfun, dest_loop);
2586 flow_loop_tree_node_add (dest_parent, dest_loop);
2588 dest_loop->safelen = src_loop->safelen;
2589 if (src_loop->unroll)
2591 dest_loop->unroll = src_loop->unroll;
2592 cfun->has_unroll = true;
2594 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2595 if (src_loop->force_vectorize)
2597 dest_loop->force_vectorize = true;
2598 cfun->has_force_vectorize_loops = true;
2600 if (src_loop->simduid)
2602 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2603 cfun->has_simduid_loops = true;
2606 /* Recurse. */
2607 copy_loops (id, dest_loop, src_loop);
2609 src_loop = src_loop->next;
2613 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB.  */
2615 void
2616 redirect_all_calls (copy_body_data * id, basic_block bb)
2618 gimple_stmt_iterator si;
2619 gimple *last = last_stmt (bb);
2620 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2622 gimple *stmt = gsi_stmt (si);
2623 if (is_gimple_call (stmt))
2625 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2626 if (edge)
2628 edge->redirect_call_stmt_to_callee ();
2629 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2630 gimple_purge_dead_eh_edges (bb);
2636 /* Make a copy of the body of FN so that it can be inserted inline in
2637 another function. Walks FN via CFG, returns new fndecl. */
2639 static tree
2640 copy_cfg_body (copy_body_data * id,
2641 basic_block entry_block_map, basic_block exit_block_map,
2642 basic_block new_entry)
2644 tree callee_fndecl = id->src_fn;
2645 /* Original cfun for the callee, doesn't change. */
2646 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2647 struct function *cfun_to_copy;
2648 basic_block bb;
2649 tree new_fndecl = NULL;
2650 bool need_debug_cleanup = false;
2651 int last;
2652 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2653 profile_count num = entry_block_map->count;
2655 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2657 /* Register specific tree functions. */
2658 gimple_register_cfg_hooks ();
2660 /* If we are inlining just a region of the function, make sure to connect
2661 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can be
2662 part of a loop, we must compute the frequency and probability of
2663 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2664 probabilities of edges incoming from the nonduplicated region.  */
2665 if (new_entry)
2667 edge e;
2668 edge_iterator ei;
2669 den = profile_count::zero ();
2671 FOR_EACH_EDGE (e, ei, new_entry->preds)
2672 if (!e->src->aux)
2673 den += e->count ();
2674 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2677 profile_count::adjust_for_ipa_scaling (&num, &den);
2679 /* Must have a CFG here at this point. */
2680 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2681 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2684 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2685 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2686 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2687 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2689 /* Duplicate any exception-handling regions. */
2690 if (cfun->eh)
2691 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2692 remap_decl_1, id);
2694 /* Use aux pointers to map the original blocks to copy. */
2695 FOR_EACH_BB_FN (bb, cfun_to_copy)
2696 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2698 basic_block new_bb = copy_bb (id, bb, num, den);
2699 bb->aux = new_bb;
2700 new_bb->aux = bb;
2701 new_bb->loop_father = entry_block_map->loop_father;
2704 last = last_basic_block_for_fn (cfun);
2706 /* Now that we've duplicated the blocks, duplicate their edges. */
2707 basic_block abnormal_goto_dest = NULL;
2708 if (id->call_stmt
2709 && stmt_can_make_abnormal_goto (id->call_stmt))
2711 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2713 bb = gimple_bb (id->call_stmt);
2714 gsi_next (&gsi);
2715 if (gsi_end_p (gsi))
2716 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2718 FOR_ALL_BB_FN (bb, cfun_to_copy)
2719 if (!id->blocks_to_copy
2720 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2721 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2722 abnormal_goto_dest, id);
2724 if (new_entry)
2726 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2727 EDGE_FALLTHRU);
2728 e->probability = profile_probability::always ();
2731 /* Duplicate the loop tree, if available and wanted. */
2732 if (loops_for_fn (src_cfun) != NULL
2733 && current_loops != NULL)
2735 copy_loops (id, entry_block_map->loop_father,
2736 get_loop (src_cfun, 0));
2737 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2738 loops_state_set (LOOPS_NEED_FIXUP);
2741 /* If the loop tree in the source function needed fixup, mark the
2742 destination loop tree for fixup, too. */
2743 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2744 loops_state_set (LOOPS_NEED_FIXUP);
2746 if (gimple_in_ssa_p (cfun))
2747 FOR_ALL_BB_FN (bb, cfun_to_copy)
2748 if (!id->blocks_to_copy
2749 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2750 copy_phis_for_bb (bb, id);
2752 FOR_ALL_BB_FN (bb, cfun_to_copy)
2753 if (bb->aux)
2755 if (need_debug_cleanup
2756 && bb->index != ENTRY_BLOCK
2757 && bb->index != EXIT_BLOCK)
2758 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2759 /* Update call edge destinations.  This cannot be done before loop
2760 info is updated, because we may split basic blocks.  */
2761 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2762 && bb->index != ENTRY_BLOCK
2763 && bb->index != EXIT_BLOCK)
2764 redirect_all_calls (id, (basic_block)bb->aux);
2765 ((basic_block)bb->aux)->aux = NULL;
2766 bb->aux = NULL;
2769 /* Zero out AUX fields of blocks newly created during EH edge
2770 insertion.  */
2771 for (; last < last_basic_block_for_fn (cfun); last++)
2773 if (need_debug_cleanup)
2774 maybe_move_debug_stmts_to_successors (id,
2775 BASIC_BLOCK_FOR_FN (cfun, last));
2776 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2777 /* Update call edge destinations.  This cannot be done before loop
2778 info is updated, because we may split basic blocks.  */
2779 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2780 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2782 entry_block_map->aux = NULL;
2783 exit_block_map->aux = NULL;
2785 if (id->eh_map)
2787 delete id->eh_map;
2788 id->eh_map = NULL;
2790 if (id->dependence_map)
2792 delete id->dependence_map;
2793 id->dependence_map = NULL;
2796 return new_fndecl;
2799 /* Copy the debug STMT using ID. We deal with these statements in a
2800 special way: if any variable in their VALUE expression wasn't
2801 remapped yet, we won't remap it, because that would get decl uids
2802 out of sync, causing codegen differences between -g and -g0. If
2803 this arises, we drop the VALUE expression altogether. */
2805 static void
2806 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2808 tree t, *n;
2809 struct walk_stmt_info wi;
2811 if (gimple_block (stmt))
2813 n = id->decl_map->get (gimple_block (stmt));
2814 gimple_set_block (stmt, n ? *n : id->block);
2817 if (gimple_debug_nonbind_marker_p (stmt))
2818 return;
2820 /* Remap all the operands in COPY. */
2821 memset (&wi, 0, sizeof (wi));
2822 wi.info = id;
2824 processing_debug_stmt = 1;
2826 if (gimple_debug_source_bind_p (stmt))
2827 t = gimple_debug_source_bind_get_var (stmt);
2828 else if (gimple_debug_bind_p (stmt))
2829 t = gimple_debug_bind_get_var (stmt);
2830 else
2831 gcc_unreachable ();
2833 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2834 && (n = id->debug_map->get (t)))
2836 gcc_assert (VAR_P (*n));
2837 t = *n;
2839 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
2840 /* T is a non-localized variable. */;
2841 else
2842 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2844 if (gimple_debug_bind_p (stmt))
2846 gimple_debug_bind_set_var (stmt, t);
2848 if (gimple_debug_bind_has_value_p (stmt))
2849 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2850 remap_gimple_op_r, &wi, NULL);
2852 /* Punt if any decl couldn't be remapped. */
2853 if (processing_debug_stmt < 0)
2854 gimple_debug_bind_reset_value (stmt);
2856 else if (gimple_debug_source_bind_p (stmt))
2858 gimple_debug_source_bind_set_var (stmt, t);
2859 /* When inlining, if the source bind refers to one of the
2860 optimized-away parameters, change the source bind into a normal debug bind
2861 referring to the corresponding DEBUG_EXPR_DECL that should have
2862 been bound before the call stmt. */
2863 t = gimple_debug_source_bind_get_value (stmt);
2864 if (t != NULL_TREE
2865 && TREE_CODE (t) == PARM_DECL
2866 && id->call_stmt)
2868 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2869 unsigned int i;
2870 if (debug_args != NULL)
2872 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2873 if ((**debug_args)[i] == DECL_ORIGIN (t)
2874 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2876 t = (**debug_args)[i + 1];
2877 stmt->subcode = GIMPLE_DEBUG_BIND;
2878 gimple_debug_bind_set_value (stmt, t);
2879 break;
2883 if (gimple_debug_source_bind_p (stmt))
2884 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2885 remap_gimple_op_r, &wi, NULL);
2888 processing_debug_stmt = 0;
2890 update_stmt (stmt);
2893 /* Process deferred debug stmts. In order to give values better odds
2894 of being successfully remapped, we delay the processing of debug
2895 stmts until all other stmts that might require remapping are
2896 processed. */
2898 static void
2899 copy_debug_stmts (copy_body_data *id)
2901 size_t i;
2902 gdebug *stmt;
2904 if (!id->debug_stmts.exists ())
2905 return;
2907 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2908 copy_debug_stmt (stmt, id);
2910 id->debug_stmts.release ();
2913 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2914 another function. */
2916 static tree
2917 copy_tree_body (copy_body_data *id)
2919 tree fndecl = id->src_fn;
2920 tree body = DECL_SAVED_TREE (fndecl);
2922 walk_tree (&body, copy_tree_body_r, id, NULL);
2924 return body;
2927 /* Make a copy of the body of FN so that it can be inserted inline in
2928 another function. */
2930 static tree
2931 copy_body (copy_body_data *id,
2932 basic_block entry_block_map, basic_block exit_block_map,
2933 basic_block new_entry)
2935 tree fndecl = id->src_fn;
2936 tree body;
2938 /* If this body has a CFG, walk CFG and copy. */
2939 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2940 body = copy_cfg_body (id, entry_block_map, exit_block_map,
2941 new_entry);
2942 copy_debug_stmts (id);
2944 return body;
2947 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2948 defined in function FN, or of a data member thereof. */
2950 static bool
2951 self_inlining_addr_expr (tree value, tree fn)
2953 tree var;
2955 if (TREE_CODE (value) != ADDR_EXPR)
2956 return false;
2958 var = get_base_address (TREE_OPERAND (value, 0));
2960 return var && auto_var_in_fn_p (var, fn);
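/* Illustrative sketch, not part of GCC: in the recursive function

     int f (int *p)
     {
       int x = 0;
       return p ? *p : f (&x);
     }

   inlining the call f (&x) into f passes the address of an automatic
   variable of the very function being inlined.  Propagating &x
   directly into the copied body would let the remapping of f's locals
   redirect it to the copied x rather than the caller's, which is why
   setup_one_parameter uses this predicate to refuse that shortcut.  */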
2963 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2964 lexical block and line number information from base_stmt, if given,
2965 or from the last stmt of the block otherwise.  */
2967 static gimple *
2968 insert_init_debug_bind (copy_body_data *id,
2969 basic_block bb, tree var, tree value,
2970 gimple *base_stmt)
2972 gimple *note;
2973 gimple_stmt_iterator gsi;
2974 tree tracked_var;
2976 if (!gimple_in_ssa_p (id->src_cfun))
2977 return NULL;
2979 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
2980 return NULL;
2982 tracked_var = target_for_debug_bind (var);
2983 if (!tracked_var)
2984 return NULL;
2986 if (bb)
2988 gsi = gsi_last_bb (bb);
2989 if (!base_stmt && !gsi_end_p (gsi))
2990 base_stmt = gsi_stmt (gsi);
2993 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
2995 if (bb)
2997 if (!gsi_end_p (gsi))
2998 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2999 else
3000 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3003 return note;
3006 static void
3007 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3009 /* If VAR represents a zero-sized variable, it's possible that the
3010 assignment statement may result in no gimple statements.  */
3011 if (init_stmt)
3013 gimple_stmt_iterator si = gsi_last_bb (bb);
3015 /* We can end up with init statements that store to a non-register
3016 from a rhs with a conversion. Handle that here by forcing the
3017 rhs into a temporary. gimple_regimplify_operands is not
3018 prepared to do this for us. */
3019 if (!is_gimple_debug (init_stmt)
3020 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3021 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3022 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3024 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3025 gimple_expr_type (init_stmt),
3026 gimple_assign_rhs1 (init_stmt));
3027 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3028 GSI_NEW_STMT);
3029 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3030 gimple_assign_set_rhs1 (init_stmt, rhs);
3032 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3033 gimple_regimplify_operands (init_stmt, &si);
3035 if (!is_gimple_debug (init_stmt))
3037 tree def = gimple_assign_lhs (init_stmt);
3038 insert_init_debug_bind (id, bb, def, def, init_stmt);
3043 /* Initialize parameter P with VALUE.  If needed, produce the init statement
3044 at the end of BB.  When BB is NULL, we return the init statement to be
3045 output later.  */
3046 static gimple *
3047 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3048 basic_block bb, tree *vars)
3050 gimple *init_stmt = NULL;
3051 tree var;
3052 tree rhs = value;
3053 tree def = (gimple_in_ssa_p (cfun)
3054 ? ssa_default_def (id->src_cfun, p) : NULL);
3056 if (value
3057 && value != error_mark_node
3058 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3060 /* If we can match up types by promotion/demotion do so. */
3061 if (fold_convertible_p (TREE_TYPE (p), value))
3062 rhs = fold_convert (TREE_TYPE (p), value);
3063 else
3065 /* ??? For valid programs we should not end up here.
3066 Still if we end up with truly mismatched types here, fall back
3067 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3068 GIMPLE to the following passes. */
3069 if (!is_gimple_reg_type (TREE_TYPE (value))
3070 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3071 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3072 else
3073 rhs = build_zero_cst (TREE_TYPE (p));
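/* Illustrative sketch, not part of GCC: with a mismatched prototype,
   e.g. a caller that declares

     int g ();

   and calls g (42) while the definition being inlined is

     struct s { int i; };
     int g (struct s arg) { return arg.i; }

   the int argument cannot be fold-converted to struct s.  Since the
   two types happen to have the same size, a VIEW_CONVERT_EXPR is
   used; had the sizes differed, a literal zero would be substituted
   instead.  */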
3077 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3078 here since the type of this decl must be visible to the calling
3079 function. */
3080 var = copy_decl_to_var (p, id);
3082 /* Declare this new variable. */
3083 DECL_CHAIN (var) = *vars;
3084 *vars = var;
3086 /* Make gimplifier happy about this variable. */
3087 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3089 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3090 we would not need to create a new variable here at all, if it
3091 weren't for debug info. Still, we can just use the argument
3092 value. */
3093 if (TREE_READONLY (p)
3094 && !TREE_ADDRESSABLE (p)
3095 && value && !TREE_SIDE_EFFECTS (value)
3096 && !def)
3098 /* We may produce non-gimple trees by adding NOPs or introduce
3099 invalid sharing when the operand is not really constant.
3100 It is not a big deal to prohibit constant propagation here, as
3101 we will constant propagate in the DOM1 pass anyway.
3102 if (is_gimple_min_invariant (value)
3103 && useless_type_conversion_p (TREE_TYPE (p),
3104 TREE_TYPE (value))
3105 /* We have to be very careful about ADDR_EXPR. Make sure
3106 the base variable isn't a local variable of the inlined
3107 function, e.g., when doing recursive inlining, direct or
3108 mutually-recursive or whatever, which is why we don't
3109 just test whether fn == current_function_decl. */
3110 && ! self_inlining_addr_expr (value, fn))
3112 insert_decl_map (id, p, value);
3113 insert_debug_decl_map (id, p, var);
3114 return insert_init_debug_bind (id, bb, var, value, NULL);
3118 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3119 that way, when the PARM_DECL is encountered, it will be
3120 automatically replaced by the VAR_DECL. */
3121 insert_decl_map (id, p, var);
3123 /* Even if P was TREE_READONLY, the new VAR should not be.
3124 In the original code, we would have constructed a
3125 temporary, and then the function body would have never
3126 changed the value of P. However, now, we will be
3127 constructing VAR directly. The constructor body may
3128 change its value multiple times as it is being
3129 constructed. Therefore, it must not be TREE_READONLY;
3130 the back-end assumes that a TREE_READONLY variable is
3131 assigned to only once. */
3132 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3133 TREE_READONLY (var) = 0;
3135 /* If there is no setup required and we are in SSA, take the easy route
3136 replacing all SSA names representing the function parameter by the
3137 SSA name passed to the function.
3139 We need to construct a map for the variable anyway, as it might be used
3140 in different SSA names when the parameter is set in the function.
3142 Do the replacement at -O0 for const arguments replaced by a constant.
3143 This is important for builtin_constant_p and other constructs requiring
3144 a constant argument to be visible in the inlined function body.  */
3145 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3146 && (optimize
3147 || (TREE_READONLY (p)
3148 && is_gimple_min_invariant (rhs)))
3149 && (TREE_CODE (rhs) == SSA_NAME
3150 || is_gimple_min_invariant (rhs))
3151 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3153 insert_decl_map (id, def, rhs);
3154 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3157 /* If the value of the argument is never used, don't bother initializing
3158 it.  */
3159 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3161 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3162 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3165 /* Initialize this VAR_DECL from the equivalent argument. Convert
3166 the argument to the proper type in case it was promoted. */
3167 if (value)
3169 if (rhs == error_mark_node)
3171 insert_decl_map (id, p, var);
3172 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3175 STRIP_USELESS_TYPE_CONVERSION (rhs);
3177 /* If we are in SSA form, properly remap the default definition
3178 or assign to a dummy SSA name if the parameter is unused and
3179 we are not optimizing. */
3180 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3182 if (def)
3184 def = remap_ssa_name (def, id);
3185 init_stmt = gimple_build_assign (def, rhs);
3186 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3187 set_ssa_default_def (cfun, var, NULL);
3189 else if (!optimize)
3191 def = make_ssa_name (var);
3192 init_stmt = gimple_build_assign (def, rhs);
3195 else
3196 init_stmt = gimple_build_assign (var, rhs);
3198 if (bb && init_stmt)
3199 insert_init_stmt (id, bb, init_stmt);
3201 return init_stmt;
3204 /* Generate code to initialize the parameters of the function at the
3205 top of the stack in ID from the GIMPLE_CALL STMT. */
3207 static void
3208 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3209 tree fn, basic_block bb)
3211 tree parms;
3212 size_t i;
3213 tree p;
3214 tree vars = NULL_TREE;
3215 tree static_chain = gimple_call_chain (stmt);
3217 /* Figure out what the parameters are. */
3218 parms = DECL_ARGUMENTS (fn);
3220 /* Loop through the parameter declarations, replacing each with an
3221 equivalent VAR_DECL, appropriately initialized. */
3222 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3224 tree val;
3225 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3226 setup_one_parameter (id, p, val, fn, bb, &vars);
3228 /* After remapping parameters remap their types. This has to be done
3229 in a second loop over all parameters to appropriately remap
3230 variable sized arrays when the size is specified in a
3231 parameter following the array. */
3232 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3234 tree *varp = id->decl_map->get (p);
3235 if (varp && VAR_P (*varp))
3237 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3238 ? ssa_default_def (id->src_cfun, p) : NULL);
3239 tree var = *varp;
3240 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3241 /* Also remap the default definition if it was remapped
3242 to the default definition of the parameter replacement
3243 by the parameter setup. */
3244 if (def)
3246 tree *defp = id->decl_map->get (def);
3247 if (defp
3248 && TREE_CODE (*defp) == SSA_NAME
3249 && SSA_NAME_VAR (*defp) == var)
3250 TREE_TYPE (*defp) = TREE_TYPE (var);
3255 /* Initialize the static chain. */
3256 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3257 gcc_assert (fn != current_function_decl);
3258 if (p)
3260 /* No static chain? Seems like a bug in tree-nested.c. */
3261 gcc_assert (static_chain);
3263 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3266 declare_inline_vars (id->block, vars);
3270 /* Declare a return variable to replace the RESULT_DECL for the
3271 function we are calling. An appropriate DECL_STMT is returned.
3272 The USE_STMT is filled to contain a use of the declaration to
3273 indicate the return value of the function.
3275 RETURN_SLOT, if non-null, is the place where to store the result.  It
3276 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3277 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3279 The return value is a (possibly null) value that holds the result
3280 as seen by the caller. */
3282 static tree
3283 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3284 basic_block entry_bb)
3286 tree callee = id->src_fn;
3287 tree result = DECL_RESULT (callee);
3288 tree callee_type = TREE_TYPE (result);
3289 tree caller_type;
3290 tree var, use;
3292 /* Handle type-mismatches in the function declaration return type
3293 vs. the call expression. */
3294 if (modify_dest)
3295 caller_type = TREE_TYPE (modify_dest);
3296 else
3297 caller_type = TREE_TYPE (TREE_TYPE (callee));
3299 /* We don't need to do anything for functions that don't return anything. */
3300 if (VOID_TYPE_P (callee_type))
3301 return NULL_TREE;
3303 /* If there was a return slot, then the return value is the
3304 dereferenced address of that object. */
3305 if (return_slot)
3307 /* The front end shouldn't have used both return_slot and
3308 a modify expression. */
3309 gcc_assert (!modify_dest);
3310 if (DECL_BY_REFERENCE (result))
3312 tree return_slot_addr = build_fold_addr_expr (return_slot);
3313 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3315 /* We are going to construct *&return_slot and we can't do that
3316 for variables believed not to be addressable.
3318 FIXME: This check can possibly match, because values returned
3319 via the return slot optimization are not believed to have their
3320 address taken by alias analysis.  */
3321 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3322 var = return_slot_addr;
3324 else
3326 var = return_slot;
3327 gcc_assert (TREE_CODE (var) != SSA_NAME);
3328 if (TREE_ADDRESSABLE (result))
3329 mark_addressable (var);
3331 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3332 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3333 && !DECL_GIMPLE_REG_P (result)
3334 && DECL_P (var))
3335 DECL_GIMPLE_REG_P (var) = 0;
3336 use = NULL;
3337 goto done;
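/* Illustrative sketch, not part of GCC: for caller code like

     struct big { int a[32]; };
     struct big make (void);
     struct big b = make ();

   the return slot optimization passes b itself as the slot, so after
   inlining make its RESULT_DECL is remapped to b (or to *&b when the
   result is DECL_BY_REFERENCE), and the callee's stores construct b
   in place without a temporary copy.  */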
3340 /* All types requiring non-trivial constructors should have been handled. */
3341 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3343 /* Attempt to avoid creating a new temporary variable. */
3344 if (modify_dest
3345 && TREE_CODE (modify_dest) != SSA_NAME)
3347 bool use_it = false;
3349 /* We can't use MODIFY_DEST if there's type promotion involved. */
3350 if (!useless_type_conversion_p (callee_type, caller_type))
3351 use_it = false;
3353 /* ??? If we're assigning to a variable sized type, then we must
3354 reuse the destination variable, because we've no good way to
3355 create variable sized temporaries at this point. */
3356 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3357 use_it = true;
3359 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3360 reuse it as the result of the call directly. Don't do this if
3361 it would promote MODIFY_DEST to addressable. */
3362 else if (TREE_ADDRESSABLE (result))
3363 use_it = false;
3364 else
3366 tree base_m = get_base_address (modify_dest);
3368 /* If the base isn't a decl, then it's a pointer, and we don't
3369 know where that's going to go. */
3370 if (!DECL_P (base_m))
3371 use_it = false;
3372 else if (is_global_var (base_m))
3373 use_it = false;
3374 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3375 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3376 && !DECL_GIMPLE_REG_P (result)
3377 && DECL_GIMPLE_REG_P (base_m))
3378 use_it = false;
3379 else if (!TREE_ADDRESSABLE (base_m))
3380 use_it = true;
3383 if (use_it)
3385 var = modify_dest;
3386 use = NULL;
3387 goto done;
3391 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3393 var = copy_result_decl_to_var (result, id);
3394 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3396 /* Do not have the rest of GCC warn about this variable as it should
3397 not be visible to the user. */
3398 TREE_NO_WARNING (var) = 1;
3400 declare_inline_vars (id->block, var);
3402 /* Build the use expr. If the return type of the function was
3403 promoted, convert it back to the expected type. */
3404 use = var;
3405 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3407 /* If we can match up types by promotion/demotion do so. */
3408 if (fold_convertible_p (caller_type, var))
3409 use = fold_convert (caller_type, var);
3410 else
3412 /* ??? For valid programs we should not end up here.
3413 Still if we end up with truly mismatched types here, fall back
3414 to using a MEM_REF to not leak invalid GIMPLE to the following
3415 passes. */
3416 /* Prevent var from being written into SSA form. */
3417 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3418 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3419 DECL_GIMPLE_REG_P (var) = false;
3420 else if (is_gimple_reg_type (TREE_TYPE (var)))
3421 TREE_ADDRESSABLE (var) = true;
3422 use = fold_build2 (MEM_REF, caller_type,
3423 build_fold_addr_expr (var),
3424 build_int_cst (ptr_type_node, 0));
3428 STRIP_USELESS_TYPE_CONVERSION (use);
3430 if (DECL_BY_REFERENCE (result))
3432 TREE_ADDRESSABLE (var) = 1;
3433 var = build_fold_addr_expr (var);
3436 done:
3437 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3438 way, when the RESULT_DECL is encountered, it will be
3439 automatically replaced by the VAR_DECL.
3441 When returning by reference, ensure that RESULT_DECL remaps to
3442 gimple_val. */
3443 if (DECL_BY_REFERENCE (result)
3444 && !is_gimple_val (var))
3446 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3447 insert_decl_map (id, result, temp);
3448 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3449 its default_def SSA_NAME.  */
3450 if (gimple_in_ssa_p (id->src_cfun)
3451 && is_gimple_reg (result))
3453 temp = make_ssa_name (temp);
3454 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3456 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3458 else
3459 insert_decl_map (id, result, var);
3461 /* Remember this so we can ignore it in remap_decls. */
3462 id->retvar = var;
3463 return use;
3466 /* Determine if the function can be copied.  If so, return NULL.  If
3467 not, return a string describing the reason for failure.  */
3469 const char *
3470 copy_forbidden (struct function *fun)
3472 const char *reason = fun->cannot_be_copied_reason;
3474 /* Only examine the function once. */
3475 if (fun->cannot_be_copied_set)
3476 return reason;
3478 /* We cannot copy a function that receives a non-local goto
3479 because we cannot remap the destination label used in the
3480 function that is performing the non-local goto. */
3481 /* ??? Actually, this should be possible, if we work at it.
3482 No doubt there's just a handful of places that simply
3483 assume it doesn't happen and don't substitute properly. */
3484 if (fun->has_nonlocal_label)
3486 reason = G_("function %q+F can never be copied "
3487 "because it receives a non-local goto");
3488 goto fail;
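/* Illustrative sketch, not part of GCC, using GNU C nested functions
   (use is a hypothetical consumer):

     int outer (void)
     {
       void inner (void) { goto out; }
       use (inner);
       return 0;
     out:
       return 1;
     }

   Here outer receives a non-local goto from inner; a copy of outer
   could not remap the destination label used by inner, so copying is
   forbidden.  */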
3491 if (fun->has_forced_label_in_static)
3493 reason = G_("function %q+F can never be copied because it saves "
3494 "address of local label in a static variable");
3495 goto fail;
3498 fail:
3499 fun->cannot_be_copied_reason = reason;
3500 fun->cannot_be_copied_set = true;
3501 return reason;
3505 static const char *inline_forbidden_reason;
3507 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3508 iff a function cannot be inlined.  Also sets the reason why.  */
3510 static tree
3511 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3512 struct walk_stmt_info *wip)
3514 tree fn = (tree) wip->info;
3515 tree t;
3516 gimple *stmt = gsi_stmt (*gsi);
3518 switch (gimple_code (stmt))
3520 case GIMPLE_CALL:
3521 /* Refuse to inline alloca calls unless the user explicitly forced so, as
3522 this may change the program's memory overhead drastically when the
3523 function using alloca is called in a loop.  In the GCC present in
3524 SPEC2000, inlining into schedule_block caused it to require 2GB of
3525 RAM instead of 256MB.  Don't do so for alloca calls emitted for
3526 VLA objects, as those can't cause unbounded growth (they're always
3527 wrapped inside stack_save/stack_restore regions).  */
3528 if (gimple_maybe_alloca_call_p (stmt)
3529 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3530 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3532 inline_forbidden_reason
3533 = G_("function %q+F can never be inlined because it uses "
3534 "alloca (override using the always_inline attribute)");
3535 *handled_ops_p = true;
3536 return fn;
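/* Illustrative sketch, not part of GCC (fill, use and driver are
   hypothetical):

     void fill (int n) { int *p = alloca (n * sizeof (int)); use (p); }
     void driver (void) { for (int i = 0; i < 1000000; i++) fill (i); }

   Inlining fill into driver would keep every iteration's alloca block
   live in driver's frame until driver returns, instead of freeing it
   when fill returns, hence the refusal above.  */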
3539 t = gimple_call_fndecl (stmt);
3540 if (t == NULL_TREE)
3541 break;
3543 /* We cannot inline functions that call setjmp. */
3544 if (setjmp_call_p (t))
3546 inline_forbidden_reason
3547 = G_("function %q+F can never be inlined because it uses setjmp");
3548 *handled_ops_p = true;
3549 return t;
3552 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3553 switch (DECL_FUNCTION_CODE (t))
3555 /* We cannot inline functions that take a variable number of
3556 arguments. */
3557 case BUILT_IN_VA_START:
3558 case BUILT_IN_NEXT_ARG:
3559 case BUILT_IN_VA_END:
3560 inline_forbidden_reason
3561 = G_("function %q+F can never be inlined because it "
3562 "uses variable argument lists");
3563 *handled_ops_p = true;
3564 return t;
3566 case BUILT_IN_LONGJMP:
3567 /* We can't inline functions that call __builtin_longjmp at
3568 all. The non-local goto machinery really requires the
3569 destination be in a different function. If we allow the
3570 function calling __builtin_longjmp to be inlined into the
3571 function calling __builtin_setjmp, Things will Go Awry. */
3572 inline_forbidden_reason
3573 = G_("function %q+F can never be inlined because "
3574 "it uses setjmp-longjmp exception handling");
3575 *handled_ops_p = true;
3576 return t;
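/* Illustrative sketch, not part of GCC (raise_it is hypothetical): if

     void raise_it (void *env) { __builtin_longjmp (env, 1); }

   were inlined into the function that performed __builtin_setjmp (env),
   the jump would no longer cross a function boundary, violating the
   requirement above that the destination be in a different function.  */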
3578 case BUILT_IN_NONLOCAL_GOTO:
3579 /* Similarly. */
3580 inline_forbidden_reason
3581 = G_("function %q+F can never be inlined because "
3582 "it uses non-local goto");
3583 *handled_ops_p = true;
3584 return t;
3586 case BUILT_IN_RETURN:
3587 case BUILT_IN_APPLY_ARGS:
3588 /* If a __builtin_apply_args caller would be inlined,
3589 it would be saving arguments of the function it has
3590 been inlined into.  Similarly, __builtin_return would
3591 return from the function the inlined body has been inlined into.  */
3592 inline_forbidden_reason
3593 = G_("function %q+F can never be inlined because "
3594 "it uses __builtin_return or __builtin_apply_args");
3595 *handled_ops_p = true;
3596 return t;
3598 default:
3599 break;
3601 break;
3603 case GIMPLE_GOTO:
3604 t = gimple_goto_dest (stmt);
3606 /* We will not inline a function which uses computed goto. The
3607 addresses of its local labels, which may be tucked into
3608 global storage, are of course not constant across
3609 instantiations, which causes unexpected behavior.  */
3610 if (TREE_CODE (t) != LABEL_DECL)
3612 inline_forbidden_reason
3613 = G_("function %q+F can never be inlined "
3614 "because it contains a computed goto");
3615 *handled_ops_p = true;
3616 return t;
3618 break;
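/* Illustrative sketch, not part of GCC: a function using

     static void *tab[] = { &&a, &&b };
     goto *tab[i];

   stores the addresses of its local labels in static storage; after
   inlining, those addresses would still point into the original body,
   which is why such functions are rejected here.  */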
3620 default:
3621 break;
3624 *handled_ops_p = false;
3625 return NULL_TREE;
3628 /* Return true if FNDECL is a function that cannot be inlined into
3629 another one. */
3631 static bool
3632 inline_forbidden_p (tree fndecl)
3634 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3635 struct walk_stmt_info wi;
3636 basic_block bb;
3637 bool forbidden_p = false;
3639 /* First check for shared reasons not to copy the code. */
3640 inline_forbidden_reason = copy_forbidden (fun);
3641 if (inline_forbidden_reason != NULL)
3642 return true;
3644 /* Next, walk the statements of the function looking for
3645 constructs we can't handle, or that are non-optimal for inlining.  */
3646 hash_set<tree> visited_nodes;
3647 memset (&wi, 0, sizeof (wi));
3648 wi.info = (void *) fndecl;
3649 wi.pset = &visited_nodes;
3651 FOR_EACH_BB_FN (bb, fun)
3653 gimple *ret;
3654 gimple_seq seq = bb_seq (bb);
3655 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3656 forbidden_p = (ret != NULL);
3657 if (forbidden_p)
3658 break;
3661 return forbidden_p;
3664 /* Return false if the function FNDECL cannot be inlined on account of its
3665 attributes, true otherwise. */
3666 static bool
3667 function_attribute_inlinable_p (const_tree fndecl)
3669 if (targetm.attribute_table)
3671 const_tree a;
3673 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3675 const_tree name = TREE_PURPOSE (a);
3676 int i;
3678 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3679 if (is_attribute_p (targetm.attribute_table[i].name, name))
3680 return targetm.function_attribute_inlinable_p (fndecl);
3684 return true;
3687 /* Returns nonzero if FN is a function that does not have any
3688 fundamental inline blocking properties. */
3690 bool
3691 tree_inlinable_function_p (tree fn)
3693 bool inlinable = true;
3694 bool do_warning;
3695 tree always_inline;
3697 /* If we've already decided this function shouldn't be inlined,
3698 there's no need to check again. */
3699 if (DECL_UNINLINABLE (fn))
3700 return false;
3702 /* We only warn for functions declared `inline' by the user. */
3703 do_warning = (warn_inline
3704 && DECL_DECLARED_INLINE_P (fn)
3705 && !DECL_NO_INLINE_WARNING_P (fn)
3706 && !DECL_IN_SYSTEM_HEADER (fn));
3708 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3710 if (flag_no_inline
3711 && always_inline == NULL)
3713 if (do_warning)
3714 warning (OPT_Winline, "function %q+F can never be inlined because it "
3715 "is suppressed using -fno-inline", fn);
3716 inlinable = false;
3719 else if (!function_attribute_inlinable_p (fn))
3721 if (do_warning)
3722 warning (OPT_Winline, "function %q+F can never be inlined because it "
3723 "uses attributes conflicting with inlining", fn);
3724 inlinable = false;
3727 else if (inline_forbidden_p (fn))
3729 /* See if we should warn about uninlinable functions. Previously,
3730 some of these warnings would be issued while trying to expand
3731 the function inline, but that would cause multiple warnings
3732 about functions that would, for example, call alloca.  But since
3733 this is a property of the function, just one warning is enough.
3734 As a bonus we can now give more details about the reason why a
3735 function is not inlinable. */
3736 if (always_inline)
3737 error (inline_forbidden_reason, fn);
3738 else if (do_warning)
3739 warning (OPT_Winline, inline_forbidden_reason, fn);
3741 inlinable = false;
3744 /* Squirrel away the result so that we don't have to check again. */
3745 DECL_UNINLINABLE (fn) = !inlinable;
3747 return inlinable;
3750 /* Estimate the cost of a memory move of type TYPE.  Use the machine-dependent
3751 word size, take a possible memcpy call into account, and return a
3752 cost based on whether we optimize for size or speed according to SPEED_P.  */
3754 int
3755 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3757 HOST_WIDE_INT size;
3759 gcc_assert (!VOID_TYPE_P (type));
3761 if (TREE_CODE (type) == VECTOR_TYPE)
3763 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3764 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3765 int orig_mode_size
3766 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3767 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3768 return ((orig_mode_size + simd_mode_size - 1)
3769 / simd_mode_size);
3772 size = int_size_in_bytes (type);
3774 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3775 /* Cost of a memcpy call, 3 arguments and the call. */
3776 return 4;
3777 else
3778 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
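/* A worked example of the trade-off above, with purely illustrative
   (target-dependent) values MOVE_MAX_PIECES == 8 and
   MOVE_RATIO (speed_p) == 4:
     - a 24-byte aggregate: 24 <= 8 * 4, so it is moved piecewise at a
       cost of (24 + 8 - 1) / 8 = 3;
     - a 64-byte aggregate: 64 > 8 * 4, so a memcpy call is assumed and
       the cost is the flat 4 (three argument set-ups plus the call).  */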
3781 /* Returns the cost of operation CODE, according to WEIGHTS.  */
3783 static int
3784 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3785 tree op1 ATTRIBUTE_UNUSED, tree op2)
3787 switch (code)
3789 /* These are "free" conversions, or their presumed cost
3790 is folded into other operations. */
3791 case RANGE_EXPR:
3792 CASE_CONVERT:
3793 case COMPLEX_EXPR:
3794 case PAREN_EXPR:
3795 case VIEW_CONVERT_EXPR:
3796 return 0;
3798 /* Assign cost of 1 to usual operations.
3799 ??? We may consider mapping RTL costs to this. */
3800 case COND_EXPR:
3801 case VEC_COND_EXPR:
3802 case VEC_PERM_EXPR:
3804 case PLUS_EXPR:
3805 case POINTER_PLUS_EXPR:
3806 case POINTER_DIFF_EXPR:
3807 case MINUS_EXPR:
3808 case MULT_EXPR:
3809 case MULT_HIGHPART_EXPR:
3811 case ADDR_SPACE_CONVERT_EXPR:
3812 case FIXED_CONVERT_EXPR:
3813 case FIX_TRUNC_EXPR:
3815 case NEGATE_EXPR:
3816 case FLOAT_EXPR:
3817 case MIN_EXPR:
3818 case MAX_EXPR:
3819 case ABS_EXPR:
3820 case ABSU_EXPR:
3822 case LSHIFT_EXPR:
3823 case RSHIFT_EXPR:
3824 case LROTATE_EXPR:
3825 case RROTATE_EXPR:
3827 case BIT_IOR_EXPR:
3828 case BIT_XOR_EXPR:
3829 case BIT_AND_EXPR:
3830 case BIT_NOT_EXPR:
3832 case TRUTH_ANDIF_EXPR:
3833 case TRUTH_ORIF_EXPR:
3834 case TRUTH_AND_EXPR:
3835 case TRUTH_OR_EXPR:
3836 case TRUTH_XOR_EXPR:
3837 case TRUTH_NOT_EXPR:
3839 case LT_EXPR:
3840 case LE_EXPR:
3841 case GT_EXPR:
3842 case GE_EXPR:
3843 case EQ_EXPR:
3844 case NE_EXPR:
3845 case ORDERED_EXPR:
3846 case UNORDERED_EXPR:
3848 case UNLT_EXPR:
3849 case UNLE_EXPR:
3850 case UNGT_EXPR:
3851 case UNGE_EXPR:
3852 case UNEQ_EXPR:
3853 case LTGT_EXPR:
3855 case CONJ_EXPR:
3857 case PREDECREMENT_EXPR:
3858 case PREINCREMENT_EXPR:
3859 case POSTDECREMENT_EXPR:
3860 case POSTINCREMENT_EXPR:
3862 case REALIGN_LOAD_EXPR:
3864 case WIDEN_SUM_EXPR:
3865 case WIDEN_MULT_EXPR:
3866 case DOT_PROD_EXPR:
3867 case SAD_EXPR:
3868 case WIDEN_MULT_PLUS_EXPR:
3869 case WIDEN_MULT_MINUS_EXPR:
3870 case WIDEN_LSHIFT_EXPR:
3872 case VEC_WIDEN_MULT_HI_EXPR:
3873 case VEC_WIDEN_MULT_LO_EXPR:
3874 case VEC_WIDEN_MULT_EVEN_EXPR:
3875 case VEC_WIDEN_MULT_ODD_EXPR:
3876 case VEC_UNPACK_HI_EXPR:
3877 case VEC_UNPACK_LO_EXPR:
3878 case VEC_UNPACK_FLOAT_HI_EXPR:
3879 case VEC_UNPACK_FLOAT_LO_EXPR:
3880 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3881 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3882 case VEC_PACK_TRUNC_EXPR:
3883 case VEC_PACK_SAT_EXPR:
3884 case VEC_PACK_FIX_TRUNC_EXPR:
3885 case VEC_PACK_FLOAT_EXPR:
3886 case VEC_WIDEN_LSHIFT_HI_EXPR:
3887 case VEC_WIDEN_LSHIFT_LO_EXPR:
3888 case VEC_DUPLICATE_EXPR:
3889 case VEC_SERIES_EXPR:
3891 return 1;
3893 /* A few special cases of expensive operations.  This is useful
3894 to avoid inlining functions that contain too many of these.  */
3895 case TRUNC_DIV_EXPR:
3896 case CEIL_DIV_EXPR:
3897 case FLOOR_DIV_EXPR:
3898 case ROUND_DIV_EXPR:
3899 case EXACT_DIV_EXPR:
3900 case TRUNC_MOD_EXPR:
3901 case CEIL_MOD_EXPR:
3902 case FLOOR_MOD_EXPR:
3903 case ROUND_MOD_EXPR:
3904 case RDIV_EXPR:
3905 if (TREE_CODE (op2) != INTEGER_CST)
3906 return weights->div_mod_cost;
3907 return 1;
3909 /* Bit-field insertion needs several shift and mask operations. */
3910 case BIT_INSERT_EXPR:
3911 return 3;
3913 default:
3914 /* We expect a copy assignment with no operator. */
3915 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3916 return 0;
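/* To illustrate the divide/modulo special case above: with the
   time-based weights set up in init_inline_once below, "x / y" with a
   non-constant divisor costs div_mod_cost (10), while "x / 8" costs 1
   like other arithmetic, since divisions by an INTEGER_CST are usually
   strength-reduced to shifts and multiplications at expansion time.
   With the size-based weights div_mod_cost is 1, so both cost the same.  */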
3921 /* Estimate number of instructions that will be created by expanding
3922 the statements in the statement sequence STMTS.
3923 WEIGHTS contains weights attributed to various constructs. */
3925 int
3926 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3928 int cost;
3929 gimple_stmt_iterator gsi;
3931 cost = 0;
3932 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3933 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3935 return cost;
3939 /* Estimate number of instructions that will be created by expanding STMT.
3940 WEIGHTS contains weights attributed to various constructs. */
3942 int
3943 estimate_num_insns (gimple *stmt, eni_weights *weights)
3945 unsigned cost, i;
3946 enum gimple_code code = gimple_code (stmt);
3947 tree lhs;
3948 tree rhs;
3950 switch (code)
3952 case GIMPLE_ASSIGN:
3953 /* Try to estimate the cost of assignments.  We have two cases to
3954 deal with:
3955 1) Simple assignments to registers;
3956 2) Stores to things that must live in memory. This includes
3957 "normal" stores to scalars, but also assignments of large
3958 structures, or constructors of big arrays;
3960 Let us look at the first two cases, assuming we have "a = b + C":
3961 <GIMPLE_ASSIGN <var_decl "a">
3962 <plus_expr <var_decl "b"> <constant C>>
3963 If "a" is a GIMPLE register, the assignment to it is free on almost
3964 any target, because "a" usually ends up in a real register. Hence
3965 the only cost of this expression comes from the PLUS_EXPR, and we
3966 can ignore the GIMPLE_ASSIGN.
3967 If "a" is not a GIMPLE register, the assignment to "a" will most
3968 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3969 of moving something into "a", which we compute using the function
3970 estimate_move_cost. */
3971 if (gimple_clobber_p (stmt))
3972 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3974 lhs = gimple_assign_lhs (stmt);
3975 rhs = gimple_assign_rhs1 (stmt);
3977 cost = 0;
3979 /* Account for the cost of moving to / from memory. */
3980 if (gimple_store_p (stmt))
3981 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
3982 if (gimple_assign_load_p (stmt))
3983 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
3985 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3986 gimple_assign_rhs1 (stmt),
3987 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3988 == GIMPLE_BINARY_RHS
3989 ? gimple_assign_rhs2 (stmt) : NULL);
3990 break;
3992 case GIMPLE_COND:
3993 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3994 gimple_op (stmt, 0),
3995 gimple_op (stmt, 1));
3996 break;
3998 case GIMPLE_SWITCH:
4000 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4001 /* Take into account cost of the switch + guess 2 conditional jumps for
4002 each case label.
4004 TODO: once the switch expansion logic is sufficiently separated, we can
4005 do a better job of estimating the cost of the switch.  */
4006 if (weights->time_based)
4007 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4008 else
4009 cost = gimple_switch_num_labels (switch_stmt) * 2;
4011 break;
4013 case GIMPLE_CALL:
4015 tree decl;
4017 if (gimple_call_internal_p (stmt))
4018 return 0;
4019 else if ((decl = gimple_call_fndecl (stmt))
4020 && DECL_BUILT_IN (decl))
4022 /* Do not special-case builtins where we see the body.
4023 That just confuses the inliner.  */
4024 struct cgraph_node *node;
4025 if (!(node = cgraph_node::get (decl))
4026 || node->definition)
4028 /* For builtins that are likely expanded to nothing or
4029 inlined, do not account operand costs.  */
4030 else if (is_simple_builtin (decl))
4031 return 0;
4032 else if (is_inexpensive_builtin (decl))
4033 return weights->target_builtin_call_cost;
4034 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4036 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4037 specialize the cheap expansion we do here.
4038 ??? This asks for a more general solution. */
4039 switch (DECL_FUNCTION_CODE (decl))
4041 case BUILT_IN_POW:
4042 case BUILT_IN_POWF:
4043 case BUILT_IN_POWL:
4044 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4045 && (real_equal
4046 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4047 &dconst2)))
4048 return estimate_operator_cost
4049 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4050 gimple_call_arg (stmt, 0));
4051 break;
4053 default:
4054 break;
4059 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4060 if (gimple_call_lhs (stmt))
4061 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4062 weights->time_based);
4063 for (i = 0; i < gimple_call_num_args (stmt); i++)
4065 tree arg = gimple_call_arg (stmt, i);
4066 cost += estimate_move_cost (TREE_TYPE (arg),
4067 weights->time_based);
4069 break;
4072 case GIMPLE_RETURN:
4073 return weights->return_cost;
4075 case GIMPLE_GOTO:
4076 case GIMPLE_LABEL:
4077 case GIMPLE_NOP:
4078 case GIMPLE_PHI:
4079 case GIMPLE_PREDICT:
4080 case GIMPLE_DEBUG:
4081 return 0;
4083 case GIMPLE_ASM:
4085 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4086 /* 1000 means infinity. This avoids overflows later
4087 with very long asm statements. */
4088 if (count > 1000)
4089 count = 1000;
4090 return MAX (1, count);
4093 case GIMPLE_RESX:
4094 /* This is either going to be an external function call with one
4095 argument, or two register copy statements plus a goto. */
4096 return 2;
4098 case GIMPLE_EH_DISPATCH:
4099 /* ??? This is going to turn into a switch statement. Ideally
4100 we'd have a look at the eh region and estimate the number of
4101 edges involved. */
4102 return 10;
4104 case GIMPLE_BIND:
4105 return estimate_num_insns_seq (
4106 gimple_bind_body (as_a <gbind *> (stmt)),
4107 weights);
4109 case GIMPLE_EH_FILTER:
4110 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4112 case GIMPLE_CATCH:
4113 return estimate_num_insns_seq (gimple_catch_handler (
4114 as_a <gcatch *> (stmt)),
4115 weights);
4117 case GIMPLE_TRY:
4118 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4119 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4121 /* OMP directives are generally very expensive. */
4123 case GIMPLE_OMP_RETURN:
4124 case GIMPLE_OMP_SECTIONS_SWITCH:
4125 case GIMPLE_OMP_ATOMIC_STORE:
4126 case GIMPLE_OMP_CONTINUE:
4127 /* ...except these, which are cheap. */
4128 return 0;
4130 case GIMPLE_OMP_ATOMIC_LOAD:
4131 return weights->omp_cost;
4133 case GIMPLE_OMP_FOR:
4134 return (weights->omp_cost
4135 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4136 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4138 case GIMPLE_OMP_PARALLEL:
4139 case GIMPLE_OMP_TASK:
4140 case GIMPLE_OMP_CRITICAL:
4141 case GIMPLE_OMP_MASTER:
4142 case GIMPLE_OMP_TASKGROUP:
4143 case GIMPLE_OMP_ORDERED:
4144 case GIMPLE_OMP_SECTION:
4145 case GIMPLE_OMP_SECTIONS:
4146 case GIMPLE_OMP_SINGLE:
4147 case GIMPLE_OMP_TARGET:
4148 case GIMPLE_OMP_TEAMS:
4149 return (weights->omp_cost
4150 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4152 case GIMPLE_TRANSACTION:
4153 return (weights->tm_cost
4154 + estimate_num_insns_seq (gimple_transaction_body (
4155 as_a <gtransaction *> (stmt)),
4156 weights));
4158 default:
4159 gcc_unreachable ();
4162 return cost;
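/* A worked example of the GIMPLE_SWITCH estimate above: a switch with
   eight case labels plus the default (gimple_switch_num_labels == 9)
   is costed at floor_log2 (9) * 2 = 6 when estimating time, modelling
   a balanced decision tree, and at 9 * 2 = 18 when estimating size,
   guessing two conditional jumps per label.  */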
4165 /* Estimate number of instructions that will be created by expanding
4166 function FNDECL. WEIGHTS contains weights attributed to various
4167 constructs. */
4169 int
4170 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4172 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4173 gimple_stmt_iterator bsi;
4174 basic_block bb;
4175 int n = 0;
4177 gcc_assert (my_function && my_function->cfg);
4178 FOR_EACH_BB_FN (bb, my_function)
4180 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4181 n += estimate_num_insns (gsi_stmt (bsi), weights);
4184 return n;
4188 /* Initializes weights used by estimate_num_insns. */
4190 void
4191 init_inline_once (void)
4193 eni_size_weights.call_cost = 1;
4194 eni_size_weights.indirect_call_cost = 3;
4195 eni_size_weights.target_builtin_call_cost = 1;
4196 eni_size_weights.div_mod_cost = 1;
4197 eni_size_weights.omp_cost = 40;
4198 eni_size_weights.tm_cost = 10;
4199 eni_size_weights.time_based = false;
4200 eni_size_weights.return_cost = 1;
4202 /* Estimating time for call is difficult, since we have no idea what the
4203 called function does. In the current uses of eni_time_weights,
4204 underestimating the cost does less harm than overestimating it, so
4205 we choose a rather small value here. */
4206 eni_time_weights.call_cost = 10;
4207 eni_time_weights.indirect_call_cost = 15;
4208 eni_time_weights.target_builtin_call_cost = 1;
4209 eni_time_weights.div_mod_cost = 10;
4210 eni_time_weights.omp_cost = 40;
4211 eni_time_weights.tm_cost = 40;
4212 eni_time_weights.time_based = true;
4213 eni_time_weights.return_cost = 2;
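/* To illustrate how these weights compose in estimate_num_insns: with
   the time-based weights above, a direct call "res = f (a, b)" whose
   result and arguments are register-sized scalars is estimated at
   call_cost plus three moves of cost 1 each, i.e. 10 + 3 = 13; the
   same call through a function pointer starts from indirect_call_cost
   (15) instead.  */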
4217 /* Install the new lexical TREE_BLOCK NEW_BLOCK underneath CURRENT_BLOCK.  */
4219 static void
4220 prepend_lexical_block (tree current_block, tree new_block)
4222 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4223 BLOCK_SUBBLOCKS (current_block) = new_block;
4224 BLOCK_SUPERCONTEXT (new_block) = current_block;
4227 /* Add local variables from CALLEE to CALLER. */
4229 static inline void
4230 add_local_variables (struct function *callee, struct function *caller,
4231 copy_body_data *id)
4233 tree var;
4234 unsigned ix;
4236 FOR_EACH_LOCAL_DECL (callee, ix, var)
4237 if (!can_be_nonlocal (var, id))
4239 tree new_var = remap_decl (var, id);
4241 /* Remap debug-expressions. */
4242 if (VAR_P (new_var)
4243 && DECL_HAS_DEBUG_EXPR_P (var)
4244 && new_var != var)
4246 tree tem = DECL_DEBUG_EXPR (var);
4247 bool old_regimplify = id->regimplify;
4248 id->remapping_type_depth++;
4249 walk_tree (&tem, copy_tree_body_r, id, NULL);
4250 id->remapping_type_depth--;
4251 id->regimplify = old_regimplify;
4252 SET_DECL_DEBUG_EXPR (new_var, tem);
4253 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4255 add_local_decl (caller, new_var);
4259 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4260 have brought in or introduced any debug stmts for SRCVAR. */
4262 static inline void
4263 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4265 tree *remappedvarp = id->decl_map->get (srcvar);
4267 if (!remappedvarp)
4268 return;
4270 if (!VAR_P (*remappedvarp))
4271 return;
4273 if (*remappedvarp == id->retvar)
4274 return;
4276 tree tvar = target_for_debug_bind (*remappedvarp);
4277 if (!tvar)
4278 return;
4280 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4281 id->call_stmt);
4282 gimple_seq_add_stmt (bindings, stmt);
4285 /* For each inlined variable for which we may have debug bind stmts,
4286 add before GSI a final debug stmt resetting it, marking the end of
4287 its life, so that var-tracking knows it doesn't have to compute
4288 further locations for it. */
4290 static inline void
4291 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4293 tree var;
4294 unsigned ix;
4295 gimple_seq bindings = NULL;
4297 if (!gimple_in_ssa_p (id->src_cfun))
4298 return;
4300 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4301 return;
4303 for (var = DECL_ARGUMENTS (id->src_fn);
4304 var; var = DECL_CHAIN (var))
4305 reset_debug_binding (id, var, &bindings);
4307 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4308 reset_debug_binding (id, var, &bindings);
4310 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
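/* In GIMPLE dumps the reset emitted above shows up as a debug bind
   with no value, e.g. "# DEBUG var => NULL", placed right after the
   inlined body so that var-tracking stops extending the variable's
   location ranges beyond the inline sequence.  */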
4313 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4315 static bool
4316 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4318 tree use_retvar;
4319 tree fn;
4320 hash_map<tree, tree> *dst;
4321 hash_map<tree, tree> *st = NULL;
4322 tree return_slot;
4323 tree modify_dest;
4324 struct cgraph_edge *cg_edge;
4325 cgraph_inline_failed_t reason;
4326 basic_block return_block;
4327 edge e;
4328 gimple_stmt_iterator gsi, stmt_gsi;
4329 bool successfully_inlined = false;
4330 bool purge_dead_abnormal_edges;
4331 gcall *call_stmt;
4332 unsigned int prop_mask, src_properties;
4333 struct function *dst_cfun;
4334 tree simduid;
4335 use_operand_p use;
4336 gimple *simtenter_stmt = NULL;
4337 vec<tree> *simtvars_save;
4339 /* The gimplifier uses input_location in too many places, such as
4340 internal_get_tmp_var (). */
4341 location_t saved_location = input_location;
4342 input_location = gimple_location (stmt);
4344 /* From here on, we're only interested in CALL_EXPRs. */
4345 call_stmt = dyn_cast <gcall *> (stmt);
4346 if (!call_stmt)
4347 goto egress;
4349 cg_edge = id->dst_node->get_edge (stmt);
4350 gcc_checking_assert (cg_edge);
4351 /* First, see if we can figure out what function is being called.
4352 If we cannot, then there is no hope of inlining the function. */
4353 if (cg_edge->indirect_unknown_callee)
4354 goto egress;
4355 fn = cg_edge->callee->decl;
4356 gcc_checking_assert (fn);
4358 /* If FN is a declaration of a function in a nested scope that was
4359 globally declared inline, we don't set its DECL_INITIAL.
4360 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4361 C++ front-end uses it for cdtors to refer to their internal
4362 declarations, that are not real functions. Fortunately those
4363 don't have trees to be saved, so we can tell by checking their
4364 gimple_body. */
4365 if (!DECL_INITIAL (fn)
4366 && DECL_ABSTRACT_ORIGIN (fn)
4367 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4368 fn = DECL_ABSTRACT_ORIGIN (fn);
4370 /* Don't try to inline functions that are not well-suited to inlining. */
4371 if (cg_edge->inline_failed)
4373 reason = cg_edge->inline_failed;
4374 /* If this call was originally indirect, we do not want to emit any
4375 inlining related warnings or sorry messages because there are no
4376 guarantees regarding those. */
4377 if (cg_edge->indirect_inlining_edge)
4378 goto egress;
4380 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4381 /* For extern inline functions that get redefined we have always
4382 silently ignored the always_inline flag.  Better behavior would
4383 be to be able to keep both bodies and use the extern inline body
4384 for inlining, but we can't do that because frontends overwrite
4385 the body.  */
4386 && !cg_edge->callee->local.redefined_extern_inline
4387 /* During early inline pass, report only when optimization is
4388 not turned on. */
4389 && (symtab->global_info_ready
4390 || !optimize
4391 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4392 /* PR 20090218-1_0.c. Body can be provided by another module. */
4393 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4395 error ("inlining failed in call to always_inline %q+F: %s", fn,
4396 cgraph_inline_failed_string (reason));
4397 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4398 inform (gimple_location (stmt), "called from here");
4399 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4400 inform (DECL_SOURCE_LOCATION (cfun->decl),
4401 "called from this function");
4403 else if (warn_inline
4404 && DECL_DECLARED_INLINE_P (fn)
4405 && !DECL_NO_INLINE_WARNING_P (fn)
4406 && !DECL_IN_SYSTEM_HEADER (fn)
4407 && reason != CIF_UNSPECIFIED
4408 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4409 /* Do not warn about not inlined recursive calls. */
4410 && !cg_edge->recursive_p ()
4411 /* Avoid warnings during early inline pass. */
4412 && symtab->global_info_ready)
4414 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4415 fn, _(cgraph_inline_failed_string (reason))))
4417 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4418 inform (gimple_location (stmt), "called from here");
4419 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4420 inform (DECL_SOURCE_LOCATION (cfun->decl),
4421 "called from this function");
4424 goto egress;
4426 id->src_node = cg_edge->callee;
4428 /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4429 and redirect to the function being thunked.  */
4430 if (id->src_node->thunk.thunk_p)
4432 cgraph_edge *edge;
4433 tree virtual_offset = NULL;
4434 profile_count count = cg_edge->count;
4435 tree op;
4436 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4438 cg_edge->remove ();
4439 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4440 gimple_uid (stmt),
4441 profile_count::one (),
4442 profile_count::one (),
4443 true);
4444 edge->count = count;
4445 if (id->src_node->thunk.virtual_offset_p)
4446 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4447 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4448 NULL);
4449 gsi_insert_before (&iter, gimple_build_assign (op,
4450 gimple_call_arg (stmt, 0)),
4451 GSI_NEW_STMT);
4452 gcc_assert (id->src_node->thunk.this_adjusting);
4453 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4454 virtual_offset);
4456 gimple_call_set_arg (stmt, 0, op);
4457 gimple_call_set_fndecl (stmt, edge->callee->decl);
4458 update_stmt (stmt);
4459 id->src_node->remove ();
4460 expand_call_inline (bb, stmt, id);
4461 maybe_remove_unused_call_args (cfun, stmt);
4462 return true;
4464 fn = cg_edge->callee->decl;
4465 cg_edge->callee->get_untransformed_body ();
4467 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4468 cg_edge->callee->verify ();
4470 /* We will be inlining this callee. */
4471 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4472 id->assign_stmts.create (0);
4474 /* Update the caller's EH personality.  */
4475 if (DECL_FUNCTION_PERSONALITY (fn))
4476 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4477 = DECL_FUNCTION_PERSONALITY (fn);
4479 /* Split the block before the GIMPLE_CALL. */
4480 stmt_gsi = gsi_for_stmt (stmt);
4481 gsi_prev (&stmt_gsi);
4482 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4483 bb = e->src;
4484 return_block = e->dest;
4485 remove_edge (e);
4487 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4488 been the source of abnormal edges. In this case, schedule
4489 the removal of dead abnormal edges. */
4490 gsi = gsi_start_bb (return_block);
4491 gsi_next (&gsi);
4492 purge_dead_abnormal_edges = gsi_end_p (gsi);
4494 stmt_gsi = gsi_start_bb (return_block);
4496 /* Build a block containing code to initialize the arguments, the
4497 actual inline expansion of the body, and a label for the return
4498 statements within the function to jump to. The type of the
4499 statement expression is the return type of the function call.
4500 ??? If the call does not have an associated block then we will
4501 remap all callee blocks to NULL, effectively dropping most of
4502 its debug information. This should only happen for calls to
4503 artificial decls inserted by the compiler itself. We need to
4504 either link the inlined blocks into the caller block tree or
4505 not refer to them in any way to not break GC for locations. */
4506 if (gimple_block (stmt))
4508 id->block = make_node (BLOCK);
4509 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4510 BLOCK_SOURCE_LOCATION (id->block)
4511 = LOCATION_LOCUS (gimple_location (stmt));
4512 prepend_lexical_block (gimple_block (stmt), id->block);
4515 /* Local declarations will be replaced by their equivalents in this map. */
4516 st = id->decl_map;
4517 id->decl_map = new hash_map<tree, tree>;
4518 dst = id->debug_map;
4519 id->debug_map = NULL;
4521 /* Record the function we are about to inline. */
4522 id->src_fn = fn;
4523 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4524 id->reset_location = DECL_IGNORED_P (fn);
4525 id->call_stmt = call_stmt;
4527 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4528 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4529 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4530 simtvars_save = id->dst_simt_vars;
4531 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4532 && (simduid = bb->loop_father->simduid) != NULL_TREE
4533 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4534 && single_imm_use (simduid, &use, &simtenter_stmt)
4535 && is_gimple_call (simtenter_stmt)
4536 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4537 vec_alloc (id->dst_simt_vars, 0);
4538 else
4539 id->dst_simt_vars = NULL;
4541 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4542 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4544 /* If the src function contains an IFN_VA_ARG, then so will the dst
4545 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4546 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4547 src_properties = id->src_cfun->curr_properties & prop_mask;
4548 if (src_properties != prop_mask)
4549 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4551 gcc_assert (!id->src_cfun->after_inlining);
4553 id->entry_bb = bb;
4554 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4556 gimple_stmt_iterator si = gsi_last_bb (bb);
4557 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4558 NOT_TAKEN),
4559 GSI_NEW_STMT);
4561 initialize_inlined_parameters (id, stmt, fn, bb);
4562 if (debug_nonbind_markers_p && debug_inline_points && id->block
4563 && inlined_function_outer_scope_p (id->block))
4565 gimple_stmt_iterator si = gsi_last_bb (bb);
4566 gsi_insert_after (&si, gimple_build_debug_inline_entry
4567 (id->block, input_location), GSI_NEW_STMT);
4570 if (DECL_INITIAL (fn))
4572 if (gimple_block (stmt))
4574 tree *var;
4576 prepend_lexical_block (id->block,
4577 remap_blocks (DECL_INITIAL (fn), id));
4578 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4579 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4580 == NULL_TREE));
4581 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4582 otherwise, for DWARF, the DW_TAG_formal_parameter dies will not be
4583 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4584 under it.  The parameters can then be evaluated in the debugger,
4585 but don't show up in backtraces.  */
4586 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4587 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4589 tree v = *var;
4590 *var = TREE_CHAIN (v);
4591 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4592 BLOCK_VARS (id->block) = v;
4594 else
4595 var = &TREE_CHAIN (*var);
4597 else
4598 remap_blocks_to_null (DECL_INITIAL (fn), id);
4601 /* Return statements in the function body will be replaced by jumps
4602 to the RET_LABEL. */
4603 gcc_assert (DECL_INITIAL (fn));
4604 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4606 /* Find the LHS to which the result of this call is assigned. */
4607 return_slot = NULL;
4608 if (gimple_call_lhs (stmt))
4610 modify_dest = gimple_call_lhs (stmt);
4612 /* The function which we are inlining might not return a value,
4613 in which case we should issue a warning that the function
4614 does not return a value. In that case the optimizers will
4615 see that the variable to which the value is assigned was not
4616 initialized. We do not want to issue a warning about that
4617 uninitialized variable. */
4618 if (DECL_P (modify_dest))
4619 TREE_NO_WARNING (modify_dest) = 1;
4621 if (gimple_call_return_slot_opt_p (call_stmt))
4623 return_slot = modify_dest;
4624 modify_dest = NULL;
4627 else
4628 modify_dest = NULL;
4630 /* If we are inlining a call to the C++ operator new, we don't want
4631 to use type based alias analysis on the return value. Otherwise
4632 we may get confused if the compiler sees that the inlined new
4633 function returns a pointer which was just deleted. See bug
4634 33407. */
4635 if (DECL_IS_OPERATOR_NEW (fn))
4637 return_slot = NULL;
4638 modify_dest = NULL;
4641 /* Declare the return variable for the function. */
4642 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4644 /* Add local vars in this inlined callee to caller. */
4645 add_local_variables (id->src_cfun, cfun, id);
4647 if (dump_file && (dump_flags & TDF_DETAILS))
4649 fprintf (dump_file, "Inlining %s to %s with frequency %4.2f\n",
4650 id->src_node->dump_name (),
4651 id->dst_node->dump_name (),
4652 cg_edge->sreal_frequency ().to_double ());
4653 id->src_node->dump (dump_file);
4654 id->dst_node->dump (dump_file);
4657 /* This is it. Duplicate the callee body. Assume callee is
4658 pre-gimplified. Note that we must not alter the caller
4659 function in any way before this point, as this CALL_EXPR may be
4660 a self-referential call; if we're calling ourselves, we need to
4661 duplicate our body before altering anything. */
4662 copy_body (id, bb, return_block, NULL);
4664 reset_debug_bindings (id, stmt_gsi);
4666 if (flag_stack_reuse != SR_NONE)
4667 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4668 if (!TREE_THIS_VOLATILE (p))
4670 tree *varp = id->decl_map->get (p);
4671 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4673 tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4674 gimple *clobber_stmt;
4675 TREE_THIS_VOLATILE (clobber) = 1;
4676 clobber_stmt = gimple_build_assign (*varp, clobber);
4677 gimple_set_location (clobber_stmt, gimple_location (stmt));
4678 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4682 /* Reset the escaped solution. */
4683 if (cfun->gimple_df)
4684 pt_solution_reset (&cfun->gimple_df->escaped);
4686 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4687 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4689 size_t nargs = gimple_call_num_args (simtenter_stmt);
4690 vec<tree> *vars = id->dst_simt_vars;
4691 auto_vec<tree> newargs (nargs + vars->length ());
4692 for (size_t i = 0; i < nargs; i++)
4693 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4694 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4696 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4697 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4699 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4700 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4701 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4702 gsi_replace (&gsi, g, false);
4704 vec_free (id->dst_simt_vars);
4705 id->dst_simt_vars = simtvars_save;
4707 /* Clean up. */
4708 if (id->debug_map)
4710 delete id->debug_map;
4711 id->debug_map = dst;
4713 delete id->decl_map;
4714 id->decl_map = st;
4716 /* Unlink the call's virtual operands before replacing the statement.  */
4717 unlink_stmt_vdef (stmt);
4718 if (gimple_vdef (stmt)
4719 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4720 release_ssa_name (gimple_vdef (stmt));
4722 /* If the inlined function returns a result that we care about,
4723 substitute the GIMPLE_CALL with an assignment of the return
4724 variable to the LHS of the call. That is, if STMT was
4725 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4726 if (use_retvar && gimple_call_lhs (stmt))
4728 gimple *old_stmt = stmt;
4729 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4730 gimple_set_location (stmt, gimple_location (old_stmt));
4731 gsi_replace (&stmt_gsi, stmt, false);
4732 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4733 /* Append a clobber for id->retvar if easily possible. */
4734 if (flag_stack_reuse != SR_NONE
4735 && id->retvar
4736 && VAR_P (id->retvar)
4737 && id->retvar != return_slot
4738 && id->retvar != modify_dest
4739 && !TREE_THIS_VOLATILE (id->retvar)
4740 && !is_gimple_reg (id->retvar)
4741 && !stmt_ends_bb_p (stmt))
4743 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4744 gimple *clobber_stmt;
4745 TREE_THIS_VOLATILE (clobber) = 1;
4746 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4747 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4748 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4751 else
4753 /* Handle the case of inlining a function with no return
4754 statement, which causes the return value to become undefined. */
4755 if (gimple_call_lhs (stmt)
4756 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4758 tree name = gimple_call_lhs (stmt);
4759 tree var = SSA_NAME_VAR (name);
4760 tree def = var ? ssa_default_def (cfun, var) : NULL;
4762 if (def)
4764 /* If the variable is used undefined, make this name
4765 undefined via a move. */
4766 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4767 gsi_replace (&stmt_gsi, stmt, true);
4769 else
4771 if (!var)
4773 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4774 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4776 /* Otherwise make this variable undefined. */
4777 gsi_remove (&stmt_gsi, true);
4778 set_ssa_default_def (cfun, var, name);
4779 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4782 /* Replace with a clobber for id->retvar. */
4783 else if (flag_stack_reuse != SR_NONE
4784 && id->retvar
4785 && VAR_P (id->retvar)
4786 && id->retvar != return_slot
4787 && id->retvar != modify_dest
4788 && !TREE_THIS_VOLATILE (id->retvar)
4789 && !is_gimple_reg (id->retvar))
4791 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4792 gimple *clobber_stmt;
4793 TREE_THIS_VOLATILE (clobber) = 1;
4794 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4795 gimple_set_location (clobber_stmt, gimple_location (stmt));
4796 gsi_replace (&stmt_gsi, clobber_stmt, false);
4797 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
4799 else
4800 gsi_remove (&stmt_gsi, true);
4803 if (purge_dead_abnormal_edges)
4805 gimple_purge_dead_eh_edges (return_block);
4806 gimple_purge_dead_abnormal_call_edges (return_block);
4809 /* If the value of the new expression is ignored, that's OK. We
4810 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4811 the equivalent inlined version either. */
4812 if (is_gimple_assign (stmt))
4814 gcc_assert (gimple_assign_single_p (stmt)
4815 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4816 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4819 id->assign_stmts.release ();
4821 /* Output the inlining info for this abstract function, since it has been
4822 inlined. If we don't do this now, we can lose the information about the
4823 variables in the function when the blocks get blown away as soon as we
4824 remove the cgraph node. */
4825 if (gimple_block (stmt))
4826 (*debug_hooks->outlining_inline_function) (fn);
4828 /* Update callgraph if needed. */
4829 cg_edge->callee->remove ();
4831 id->block = NULL_TREE;
4832 id->retvar = NULL_TREE;
4833 successfully_inlined = true;
4835 egress:
4836 input_location = saved_location;
4837 return successfully_inlined;
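/* Schematically, for a hypothetical caller statement "x = foo (a);",
   expand_call_inline performs (the names below are illustrative only):

     before:                 after:
       x = foo (a);            param = a;      <- parameter setup
       use (x);                ... copied body of foo, with each return
                               turned into an assignment to retvar and
                               a jump to the split point ...
                               x = retvar;     <- replaces the call
                               use (x);

   The call's block is split in two, the callee body is copied in
   between, and the GIMPLE_CALL itself is finally replaced by the
   assignment from the return variable (or simply removed when the
   result is unused).  */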
4840 /* Expand call statements reachable from BB.
4841 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4842 in a MODIFY_EXPR. */
4844 static bool
4845 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4847 gimple_stmt_iterator gsi;
4848 bool inlined = false;
4850 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4852 gimple *stmt = gsi_stmt (gsi);
4853 gsi_prev (&gsi);
4855 if (is_gimple_call (stmt)
4856 && !gimple_call_internal_p (stmt))
4857 inlined |= expand_call_inline (bb, stmt, id);
4860 return inlined;
4864 /* Walk all basic blocks created after FIRST and try to fold every statement
4865 in the STATEMENTS pointer set. */
4867 static void
4868 fold_marked_statements (int first, hash_set<gimple *> *statements)
4870 for (; first < n_basic_blocks_for_fn (cfun); first++)
4871 if (BASIC_BLOCK_FOR_FN (cfun, first))
4873 gimple_stmt_iterator gsi;
4875 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4876 !gsi_end_p (gsi);
4877 gsi_next (&gsi))
4878 if (statements->contains (gsi_stmt (gsi)))
4880 gimple *old_stmt = gsi_stmt (gsi);
4881 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4883 if (old_decl && DECL_BUILT_IN (old_decl))
4885 /* Folding builtins can create multiple instructions;
4886 we need to look at all of them.  */
4887 gimple_stmt_iterator i2 = gsi;
4888 gsi_prev (&i2);
4889 if (fold_stmt (&gsi))
4891 gimple *new_stmt;
4892 /* If a builtin at the end of a bb folded into nothing,
4893 the following loop won't work. */
4894 if (gsi_end_p (gsi))
4896 cgraph_update_edges_for_call_stmt (old_stmt,
4897 old_decl, NULL);
4898 break;
4900 if (gsi_end_p (i2))
4901 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4902 else
4903 gsi_next (&i2);
4904 while (1)
4906 new_stmt = gsi_stmt (i2);
4907 update_stmt (new_stmt);
4908 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4909 new_stmt);
4911 if (new_stmt == gsi_stmt (gsi))
4913 /* It is okay to check only for the very last
4914 of these statements.  If it is a throwing
4915 statement nothing will change.  If it isn't,
4916 this can remove EH edges.  That would only be
4917 incorrect if some intermediate stmts could
4918 throw while the last one could not; then we
4919 would have to split the block, which we can't
4920 do here and we'd lose anyway.  And as builtins
4921 probably never throw, this all
4922 is moot anyway.  */
4923 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4924 new_stmt))
4925 gimple_purge_dead_eh_edges (
4926 BASIC_BLOCK_FOR_FN (cfun, first));
4927 break;
4929 gsi_next (&i2);
4933 else if (fold_stmt (&gsi))
4935 /* Re-read the statement from GSI as fold_stmt() may
4936 have changed it. */
4937 gimple *new_stmt = gsi_stmt (gsi);
4938 update_stmt (new_stmt);
4940 if (is_gimple_call (old_stmt)
4941 || is_gimple_call (new_stmt))
4942 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4943 new_stmt);
4945 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4946 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4947 first));
4953 /* Expand calls to inline functions in the body of FN. */
4955 unsigned int
4956 optimize_inline_calls (tree fn)
4958 copy_body_data id;
4959 basic_block bb;
4960 int last = n_basic_blocks_for_fn (cfun);
4961 bool inlined_p = false;
4963 /* Clear out ID. */
4964 memset (&id, 0, sizeof (id));
4966 id.src_node = id.dst_node = cgraph_node::get (fn);
4967 gcc_assert (id.dst_node->definition);
4968 id.dst_fn = fn;
4969 /* Or any functions that aren't finished yet. */
4970 if (current_function_decl)
4971 id.dst_fn = current_function_decl;
4973 id.copy_decl = copy_decl_maybe_to_var;
4974 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4975 id.transform_new_cfg = false;
4976 id.transform_return_to_modify = true;
4977 id.transform_parameter = true;
4978 id.transform_lang_insert_block = NULL;
4979 id.statements_to_fold = new hash_set<gimple *>;
4981 push_gimplify_context ();
4983 /* We make no attempts to keep dominance info up-to-date. */
4984 free_dominance_info (CDI_DOMINATORS);
4985 free_dominance_info (CDI_POST_DOMINATORS);
4987 /* Register specific gimple functions. */
4988 gimple_register_cfg_hooks ();
4990 /* Reach the trees by walking over the CFG, and note the
4991 enclosing basic-blocks in the call edges. */
4992 /* We walk the blocks going forward, because inlined function bodies
4993 will split id->current_basic_block, and the new blocks will
4994 follow it; we'll trudge through them, processing their CALL_EXPRs
4995 along the way. */
4996 FOR_EACH_BB_FN (bb, cfun)
4997 inlined_p |= gimple_expand_calls_inline (bb, &id);
4999 pop_gimplify_context (NULL);
5001 if (flag_checking)
5003 struct cgraph_edge *e;
5005 id.dst_node->verify ();
5007 /* Double check that we inlined everything we are supposed to inline. */
5008 for (e = id.dst_node->callees; e; e = e->next_callee)
5009 gcc_assert (e->inline_failed);
5012 /* Fold queued statements. */
5013 update_max_bb_count ();
5014 fold_marked_statements (last, id.statements_to_fold);
5015 delete id.statements_to_fold;
5017 gcc_assert (!id.debug_stmts.exists ());
5019 /* If we didn't inline into the function there is nothing to do. */
5020 if (!inlined_p)
5021 return 0;
5023 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5024 number_blocks (fn);
5026 delete_unreachable_blocks_update_callgraph (&id);
5027 if (flag_checking)
5028 id.dst_node->verify ();
5030 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5031 not possible yet - the IPA passes might make various functions non-throwing,
5032 and they don't care to proactively update local EH info.  This is
5033 done later in the fixup_cfg pass, which also executes the verification.  */
5034 return (TODO_update_ssa
5035 | TODO_cleanup_cfg
5036 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5037 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5038 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5039 ? TODO_rebuild_frequencies : 0));
5042 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5044 tree
5045 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5047 enum tree_code code = TREE_CODE (*tp);
5048 enum tree_code_class cl = TREE_CODE_CLASS (code);
5050 /* We make copies of most nodes. */
5051 if (IS_EXPR_CODE_CLASS (cl)
5052 || code == TREE_LIST
5053 || code == TREE_VEC
5054 || code == TYPE_DECL
5055 || code == OMP_CLAUSE)
5057 /* Because the chain gets clobbered when we make a copy, we save it
5058 here. */
5059 tree chain = NULL_TREE, new_tree;
5061 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5062 chain = TREE_CHAIN (*tp);
5064 /* Copy the node. */
5065 new_tree = copy_node (*tp);
5067 *tp = new_tree;
5069 /* Now, restore the chain, if appropriate. That will cause
5070 walk_tree to walk into the chain as well. */
5071 if (code == PARM_DECL
5072 || code == TREE_LIST
5073 || code == OMP_CLAUSE)
5074 TREE_CHAIN (*tp) = chain;
5076 /* For now, we don't update BLOCKs when we make copies. So, we
5077 have to nullify all BIND_EXPRs. */
5078 if (TREE_CODE (*tp) == BIND_EXPR)
5079 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5081 else if (code == CONSTRUCTOR)
5083 /* CONSTRUCTOR nodes need special handling because
5084 we need to duplicate the vector of elements. */
5085 tree new_tree;
5087 new_tree = copy_node (*tp);
5088 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5089 *tp = new_tree;
5091 else if (code == STATEMENT_LIST)
5092 /* We used to just abort on STATEMENT_LIST, but we can run into them
5093 with statement-expressions (c++/40975). */
5094 copy_statement_list (tp);
5095 else if (TREE_CODE_CLASS (code) == tcc_type)
5096 *walk_subtrees = 0;
5097 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5098 *walk_subtrees = 0;
5099 else if (TREE_CODE_CLASS (code) == tcc_constant)
5100 *walk_subtrees = 0;
5101 return NULL_TREE;
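/* A typical (sketched) use of copy_tree_r is a deep copy of an
   expression via walk_tree:

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);

   Each expression node is replaced by a fresh copy as the walk
   descends, while types, declarations and constants are deliberately
   shared rather than duplicated.  */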
5104 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5105 information indicating to what new SAVE_EXPR this one should be mapped,
5106 use that one. Otherwise, create a new node and enter it in ST. FN is
5107 the function into which the copy will be placed. */
5109 static void
5110 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5112 tree *n;
5113 tree t;
5115 /* See if we already encountered this SAVE_EXPR. */
5116 n = st->get (*tp);
5118 /* If we didn't already remap this SAVE_EXPR, do so now. */
5119 if (!n)
5121 t = copy_node (*tp);
5123 /* Remember this SAVE_EXPR. */
5124 st->put (*tp, t);
5125 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5126 st->put (t, t);
5128 else
5130 /* We've already walked into this SAVE_EXPR; don't do it again. */
5131 *walk_subtrees = 0;
5132 t = *n;
5135 /* Replace this SAVE_EXPR with the copy. */
5136 *tp = t;
5139 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5140 label, copies the declaration and enters it in the decl map in DATA (which
5141 is really a 'copy_body_data *').  */
5143 static tree
5144 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5145 bool *handled_ops_p ATTRIBUTE_UNUSED,
5146 struct walk_stmt_info *wi)
5148 copy_body_data *id = (copy_body_data *) wi->info;
5149 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5151 if (stmt)
5153 tree decl = gimple_label_label (stmt);
5155 /* Copy the decl and remember the copy. */
5156 insert_decl_map (id, decl, id->copy_decl (decl, id));
5159 return NULL_TREE;
5162 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5163 struct walk_stmt_info *wi);
5165 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5166 Using the decl_map of the copy_body_data passed in DATA, this
5167 remaps all local declarations to appropriate replacements in gimple
5168 operands.  */
5170 static tree
5171 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5173 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5174 copy_body_data *id = (copy_body_data *) wi->info;
5175 hash_map<tree, tree> *st = id->decl_map;
5176 tree *n;
5177 tree expr = *tp;
5179 /* For recursive invocations this is no longer the LHS itself. */
5180 bool is_lhs = wi->is_lhs;
5181 wi->is_lhs = false;
5183 if (TREE_CODE (expr) == SSA_NAME)
5185 *tp = remap_ssa_name (*tp, id);
5186 *walk_subtrees = 0;
5187 if (is_lhs)
5188 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5190 /* Only a local declaration (variable or label). */
5191 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5192 || TREE_CODE (expr) == LABEL_DECL)
5194 /* Lookup the declaration. */
5195 n = st->get (expr);
5197 /* If it's there, remap it. */
5198 if (n)
5199 *tp = *n;
5200 *walk_subtrees = 0;
5202 else if (TREE_CODE (expr) == STATEMENT_LIST
5203 || TREE_CODE (expr) == BIND_EXPR
5204 || TREE_CODE (expr) == SAVE_EXPR)
5205 gcc_unreachable ();
5206 else if (TREE_CODE (expr) == TARGET_EXPR)
5208 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5209 It's OK for this to happen if it was part of a subtree that
5210 isn't immediately expanded, such as operand 2 of another
5211 TARGET_EXPR. */
5212 if (!TREE_OPERAND (expr, 1))
5214 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5215 TREE_OPERAND (expr, 3) = NULL_TREE;
5218 else if (TREE_CODE (expr) == OMP_CLAUSE)
5220 /* Before the omplower pass completes, some OMP clauses can contain
5221 sequences that are neither copied by gimple_seq_copy nor walked by
5222 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5223 in those situations, we have to copy and process them explicitly.  */
5225 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5227 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5228 seq = duplicate_remap_omp_clause_seq (seq, wi);
5229 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5231 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5233 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5234 seq = duplicate_remap_omp_clause_seq (seq, wi);
5235 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5237 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5239 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5240 seq = duplicate_remap_omp_clause_seq (seq, wi);
5241 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5242 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5243 seq = duplicate_remap_omp_clause_seq (seq, wi);
5244 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5248 /* Keep iterating. */
5249 return NULL_TREE;
5253 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5254 Using the decl_map of the copy_body_data passed in WI, this
5255 remaps all local declarations to appropriate replacements in gimple
5256 statements.  */
5258 static tree
5259 replace_locals_stmt (gimple_stmt_iterator *gsip,
5260 bool *handled_ops_p ATTRIBUTE_UNUSED,
5261 struct walk_stmt_info *wi)
5263 copy_body_data *id = (copy_body_data *) wi->info;
5264 gimple *gs = gsi_stmt (*gsip);
5266 if (gbind *stmt = dyn_cast <gbind *> (gs))
5268 tree block = gimple_bind_block (stmt);
5270 if (block)
5272 remap_block (&block, id);
5273 gimple_bind_set_block (stmt, block);
5276 /* This will remap a lot of the same decls again, but this should be
5277 harmless. */
5278 if (gimple_bind_vars (stmt))
5280 tree old_var, decls = gimple_bind_vars (stmt);
5282 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5283 if (!can_be_nonlocal (old_var, id)
5284 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5285 remap_decl (old_var, id);
5287 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5288 id->prevent_decl_creation_for_types = true;
5289 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5290 id->prevent_decl_creation_for_types = false;
5294 /* Keep iterating. */
5295 return NULL_TREE;
5298 /* Create a copy of SEQ and remap all decls in it. */
5300 static gimple_seq
5301 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5303 if (!seq)
5304 return NULL;
5306 /* If there are any labels in OMP sequences, they can only be referred to
5307 within the sequence itself, and therefore we can do both here.  */
5308 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5309 gimple_seq copy = gimple_seq_copy (seq);
5310 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5311 return copy;
5314 /* Copies everything in SEQ and replaces variables and labels local to
5315 current_function_decl. */
5317 gimple_seq
5318 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5320 copy_body_data id;
5321 struct walk_stmt_info wi;
5322 gimple_seq copy;
5324 /* There's nothing to do for NULL_TREE. */
5325 if (seq == NULL)
5326 return seq;
5328 /* Set up ID. */
5329 memset (&id, 0, sizeof (id));
5330 id.src_fn = current_function_decl;
5331 id.dst_fn = current_function_decl;
5332 id.src_cfun = cfun;
5333 id.decl_map = new hash_map<tree, tree>;
5334 id.debug_map = NULL;
5336 id.copy_decl = copy_decl_no_change;
5337 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5338 id.transform_new_cfg = false;
5339 id.transform_return_to_modify = false;
5340 id.transform_parameter = false;
5341 id.transform_lang_insert_block = NULL;
5343 /* Walk the tree once to find local labels. */
5344 memset (&wi, 0, sizeof (wi));
5345 hash_set<tree> visited;
5346 wi.info = &id;
5347 wi.pset = &visited;
5348 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5350 copy = gimple_seq_copy (seq);
5352 /* Walk the copy, remapping decls. */
5353 memset (&wi, 0, sizeof (wi));
5354 wi.info = &id;
5355 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5357 /* Clean up. */
5358 delete id.decl_map;
5359 if (id.debug_map)
5360 delete id.debug_map;
5361 if (id.dependence_map)
5363 delete id.dependence_map;
5364 id.dependence_map = NULL;
5367 return copy;
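/* A sketched use of copy_gimple_seq_and_replace_locals, e.g. when a
   private duplicate of an OMP clause body is needed (the variable
   names are illustrative):

     gimple_seq orig = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause);
     gimple_seq dup = copy_gimple_seq_and_replace_locals (orig);

   The duplicate gets fresh copies of every local VAR_DECL and
   LABEL_DECL; sharing the original decls would wrongly merge the
   locals of the two copies once both sequences end up in the same
   function body.  */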
5371 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5373 static tree
5374 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5376 if (*tp == data)
5377 return (tree) data;
5378 else
5379 return NULL;
5382 DEBUG_FUNCTION bool
5383 debug_find_tree (tree top, tree search)
5385 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5389 /* Declare the variables created by the inliner.  Add all the variables in
5390 VARS to BLOCK.  */
5392 static void
5393 declare_inline_vars (tree block, tree vars)
5395 tree t;
5396 for (t = vars; t; t = DECL_CHAIN (t))
5398 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5399 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5400 add_local_decl (cfun, t);
5403 if (block)
5404 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5407 /* Finish up COPY, a copy of DECL.  The DECL originally lived in the
5408 source function ID->src_fn, but the copy will now live in the
5409 destination function ID->dst_fn.  */
5411 tree
5412 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5414 /* Don't generate debug information for the copy if we wouldn't have
5415 generated it for the original either.  */
5416 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5417 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5419 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5420 declaration inspired this copy. */
5421 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5423 /* The new variable/label has no RTL, yet. */
5424 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5425 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5426 SET_DECL_RTL (copy, 0);
5428 /* These args would always appear unused, if not for this. */
5429 TREE_USED (copy) = 1;
5431 /* Set the context for the new declaration. */
5432 if (!DECL_CONTEXT (decl))
5433 /* Globals stay global. */
5435 else if (DECL_CONTEXT (decl) != id->src_fn)
5436 /* Things that weren't in the scope of the function we're inlining
5437 from aren't in the scope we're inlining to, either. */
5439 else if (TREE_STATIC (decl))
5440 /* Function-scoped static variables should stay in the original
5441 function. */
5443 else
5445 /* Ordinary automatic local variables are now in the scope of the
5446 new function. */
5447 DECL_CONTEXT (copy) = id->dst_fn;
5448 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5450 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5451 DECL_ATTRIBUTES (copy)
5452 = tree_cons (get_identifier ("omp simt private"), NULL,
5453 DECL_ATTRIBUTES (copy));
5454 id->dst_simt_vars->safe_push (copy);
5458 return copy;
5461 static tree
5462 copy_decl_to_var (tree decl, copy_body_data *id)
5464 tree copy, type;
5466 gcc_assert (TREE_CODE (decl) == PARM_DECL
5467 || TREE_CODE (decl) == RESULT_DECL);
5469 type = TREE_TYPE (decl);
5471 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5472 VAR_DECL, DECL_NAME (decl), type);
5473 if (DECL_PT_UID_SET_P (decl))
5474 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5475 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5476 TREE_READONLY (copy) = TREE_READONLY (decl);
5477 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5478 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5480 return copy_decl_for_dup_finish (id, decl, copy);

/* Like copy_decl_to_var, but create a return slot object instead of a
   pointer variable for return by invisible reference.  */

static tree
copy_result_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);
  if (DECL_BY_REFERENCE (decl))
    type = TREE_TYPE (type);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
		     VAR_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  if (!DECL_BY_REFERENCE (decl))
    {
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}
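
/* Example (illustrative): given

     struct big { int a[64]; };
     struct big f (void);

   on a target returning "struct big" by invisible reference,
   DECL_RESULT (f) has type "struct big *" with DECL_BY_REFERENCE set.
   The VAR_DECL built here strips one level of indirection and has type
   "struct big", i.e. it is the return slot itself rather than another
   pointer to it.  */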

tree
copy_decl_no_change (tree decl, copy_body_data *id)
{
  tree copy;

  copy = copy_node (decl);

  /* The COPY is not abstract; it will be generated in DST_FN.  */
  DECL_ABSTRACT_P (copy) = false;
  lang_hooks.dup_lang_specific_decl (copy);

  /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
     been taken; it's for internal bookkeeping in expand_goto_internal.  */
  if (TREE_CODE (copy) == LABEL_DECL)
    {
      TREE_ADDRESSABLE (copy) = 0;
      LABEL_DECL_UID (copy) = -1;
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}

static tree
copy_decl_maybe_to_var (tree decl, copy_body_data *id)
{
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    return copy_decl_to_var (decl, id);
  else
    return copy_decl_no_change (decl, id);
}

/* Return a copy of the function's argument tree.  */
static tree
copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
			       bitmap args_to_skip, tree *vars)
{
  tree arg, *parg;
  tree new_parm = NULL;
  int i = 0;

  parg = &new_parm;

  for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      {
	tree new_tree = remap_decl (arg, id);
	if (TREE_CODE (new_tree) != PARM_DECL)
	  new_tree = id->copy_decl (arg, id);
	lang_hooks.dup_lang_specific_decl (new_tree);
	*parg = new_tree;
	parg = &DECL_CHAIN (new_tree);
      }
    else if (!id->decl_map->get (arg))
      {
	/* Make an equivalent VAR_DECL.  If the argument is used as a
	   temporary variable later in the function, its uses will be
	   replaced by the local variable.  */
	tree var = copy_decl_to_var (arg, id);
	insert_decl_map (id, arg, var);
	/* Declare this new variable.  */
	DECL_CHAIN (var) = *vars;
	*vars = var;
      }
  return new_parm;
}
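
/* Example (illustrative): versioning

     int f (int a, int b, int c);

   with bit 1 set in ARGS_TO_SKIP produces the parameter list "a, c".
   B is no longer a PARM_DECL in the new version; if the body still
   uses it, the else branch above materializes an equivalent VAR_DECL
   for it and chains that onto *VARS.  */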

/* Return a copy of the function's static chain.  */
static tree
copy_static_chain (tree static_chain, copy_body_data * id)
{
  tree *chain_copy, *pvar;

  chain_copy = &static_chain;
  for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
    {
      tree new_tree = remap_decl (*pvar, id);
      lang_hooks.dup_lang_specific_decl (new_tree);
      DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
      *pvar = new_tree;
    }
  return static_chain;
}

/* Return true if the function is allowed to be versioned.
   This is a guard for the versioning functionality.  */

bool
tree_versionable_function_p (tree fndecl)
{
  return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
}
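
/* Example (illustrative): declaring

     __attribute__ ((noclone)) int f (int);

   makes this predicate return false, so IPA transformations will not
   create specialized versions of F.  Independently, copy_forbidden
   refuses bodies that cannot be duplicated at all, e.g. functions
   that receive a non-local goto.  */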

/* Delete all unreachable basic blocks and update the callgraph.
   Doing so is somewhat nontrivial because we need to update all clones and
   remove inline functions that become unreachable.  */

static bool
delete_unreachable_blocks_update_callgraph (copy_body_data *id)
{
  bool changed = false;
  basic_block b, next_bb;

  find_unreachable_blocks ();

  /* Delete all unreachable basic blocks.  */

  for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
       b != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
    {
      next_bb = b->next_bb;

      if (!(b->flags & BB_REACHABLE))
	{
	  gimple_stmt_iterator bsi;

	  for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
	    {
	      struct cgraph_edge *e;
	      struct cgraph_node *node;

	      id->dst_node->remove_stmt_references (gsi_stmt (bsi));

	      if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
		  && (e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
		{
		  if (!e->inline_failed)
		    e->callee->remove_symbol_and_inline_clones (id->dst_node);
		  else
		    e->remove ();
		}
	      if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
		  && id->dst_node->clones)
		for (node = id->dst_node->clones; node != id->dst_node;)
		  {
		    node->remove_stmt_references (gsi_stmt (bsi));
		    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
			&& (e = node->get_edge (gsi_stmt (bsi))) != NULL)
		      {
			if (!e->inline_failed)
			  e->callee->remove_symbol_and_inline_clones
			    (id->dst_node);
			else
			  e->remove ();
		      }

		    if (node->clones)
		      node = node->clones;
		    else if (node->next_sibling_clone)
		      node = node->next_sibling_clone;
		    else
		      {
			while (node != id->dst_node
			       && !node->next_sibling_clone)
			  node = node->clone_of;
			if (node != id->dst_node)
			  node = node->next_sibling_clone;
		      }
		  }
	    }
	  delete_basic_block (b);
	  changed = true;
	}
    }

  return changed;
}
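
/* The walk over id->dst_node->clones above is a preorder traversal of
   the clone tree: descend via node->clones, advance via
   node->next_sibling_clone, and climb back via node->clone_of when a
   subtree is exhausted.  Illustrative shape (not from a real dump):

     dst_node
       clone_A          <- dst_node->clones
         clone_A1       <- clone_A->clones
       clone_B          <- clone_A->next_sibling_clone

   The same idiom appears in update_clone_info below.  */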

/* Update clone info after duplication.  */

static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *node;
  if (!id->dst_node->clones)
    return;
  for (node = id->dst_node->clones; node != id->dst_node;)
    {
      /* First update replace maps to match the new body.  */
      if (node->clone.tree_map)
	{
	  unsigned int i;
	  for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
	    {
	      struct ipa_replace_map *replace_info;
	      replace_info = (*node->clone.tree_map)[i];
	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
	    }
	}
      if (node->clones)
	node = node->clones;
      else if (node->next_sibling_clone)
	node = node->next_sibling_clone;
      else
	{
	  while (node != id->dst_node && !node->next_sibling_clone)
	    node = node->clone_of;
	  if (node != id->dst_node)
	    node = node->next_sibling_clone;
	}
    }
}

/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees.  If UPDATE_CLONES is set, the call_stmt fields
   of edges of clones of the function will be updated.

   If non-NULL, ARGS_TO_SKIP determines which function parameters
   to remove from the new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
   If non-NULL, NEW_ENTRY determines the new entry BB of the clone.  */
void
tree_function_versioning (tree old_decl, tree new_decl,
			  vec<ipa_replace_map *, va_gc> *tree_map,
			  bool update_clones, bitmap args_to_skip,
			  bool skip_return, bitmap blocks_to_copy,
			  basic_block new_entry)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block, bb;
  auto_vec<gimple *, 10> init_stmts;
  tree vars = NULL_TREE;
  bitmap debug_args_to_skip = args_to_skip;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
	      && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node::get (old_decl);
  gcc_checking_assert (old_version_node);
  new_version_node = cgraph_node::get (new_decl);
  gcc_checking_assert (new_version_node);

  /* Copy over debug args.  */
  if (DECL_HAS_DEBUG_ARGS_P (old_decl))
    {
      vec<tree, va_gc> **new_debug_args, **old_debug_args;
      gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
      DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
      old_debug_args = decl_debug_args_lookup (old_decl);
      if (old_debug_args)
	{
	  new_debug_args = decl_debug_args_insert (new_decl);
	  *new_debug_args = vec_safe_copy (*old_debug_args);
	}
    }

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
  if (DECL_ORIGIN (old_decl) == old_decl)
    old_version_node->used_as_abstract_origin = true;
  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Record statements that will need folding after the copy.  */
  id.statements_to_fold = new hash_set<gimple *>;

  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
  id.blocks_to_copy = blocks_to_copy;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;
  id.transform_lang_insert_block = NULL;

  old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
    (DECL_STRUCT_FUNCTION (old_decl));
  DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
  DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
  initialize_cfun (new_decl, old_decl,
		   new_entry ? new_entry->count : old_entry_block->count);
  if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
    DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
      = id.src_cfun->gimple_df->ipa_pta;

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
      = copy_static_chain (p, &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < tree_map->length (); i++)
      {
	gimple *init;
	replace_info = (*tree_map)[i];
	if (replace_info->replace_p)
	  {
	    int parm_num = -1;
	    if (!replace_info->old_tree)
	      {
		int p = replace_info->parm_num;
		tree parm;
		tree req_type, new_type;

		for (parm = DECL_ARGUMENTS (old_decl); p;
		     parm = DECL_CHAIN (parm))
		  p--;
		replace_info->old_tree = parm;
		parm_num = replace_info->parm_num;
		req_type = TREE_TYPE (parm);
		new_type = TREE_TYPE (replace_info->new_tree);
		if (!useless_type_conversion_p (req_type, new_type))
		  {
		    if (fold_convertible_p (req_type, replace_info->new_tree))
		      replace_info->new_tree
			= fold_build1 (NOP_EXPR, req_type,
				       replace_info->new_tree);
		    else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
		      replace_info->new_tree
			= fold_build1 (VIEW_CONVERT_EXPR, req_type,
				       replace_info->new_tree);
		    else
		      {
			if (dump_file)
			  {
			    fprintf (dump_file, " const ");
			    print_generic_expr (dump_file,
						replace_info->new_tree);
			    fprintf (dump_file,
				     " can't be converted to param ");
			    print_generic_expr (dump_file, parm);
			    fprintf (dump_file, "\n");
			  }
			replace_info->old_tree = NULL;
		      }
		  }
	      }
	    else
	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
	    if (replace_info->old_tree)
	      {
		init = setup_one_parameter (&id, replace_info->old_tree,
					    replace_info->new_tree,
					    id.src_fn, NULL, &vars);
		if (init)
		  init_stmts.safe_push (init);
		if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
		  {
		    if (parm_num == -1)
		      {
			tree parm;
			int p;
			for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
			     parm = DECL_CHAIN (parm), p++)
			  if (parm == replace_info->old_tree)
			    {
			      parm_num = p;
			      break;
			    }
		      }
		    if (parm_num != -1)
		      {
			if (debug_args_to_skip == args_to_skip)
			  {
			    debug_args_to_skip = BITMAP_ALLOC (NULL);
			    bitmap_copy (debug_args_to_skip, args_to_skip);
			  }
			bitmap_clear_bit (debug_args_to_skip, parm_num);
		      }
		  }
	      }
	  }
      }

  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl)
      = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
				       args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
    /* Add local vars.  */
    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);

  if (DECL_RESULT (old_decl) == NULL_TREE)
    ;
  else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
    {
      DECL_RESULT (new_decl)
	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
		      RESULT_DECL, NULL_TREE, void_type_node);
      DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
      cfun->returns_struct = 0;
      cfun->returns_pcc_struct = 0;
    }
  else
    {
      tree old_name;
      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      if (gimple_in_ssa_p (id.src_cfun)
	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
	  && (old_name = ssa_default_def (id.src_cfun,
					  DECL_RESULT (old_decl))))
	{
	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
	  insert_decl_map (&id, old_name, new_name);
	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
	}
    }

  /* Set up the destination function's loop tree.  */
  if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
      cfun->curr_properties |= PROP_loops;
    }

  /* Copy the function's body.  */
  copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
	     new_entry);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  while (init_stmts.length ())
    insert_init_stmt (&id, bb, init_stmts.pop ());
  update_clone_info (&id);

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  update_max_bb_count ();
  fold_marked_statements (0, id.statements_to_fold);
  delete id.statements_to_fold;
  delete_unreachable_blocks_update_callgraph (&id);
  if (id.dst_node->definition)
    cgraph_edge::rebuild_references ();
  if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    {
      calculate_dominance_info (CDI_DOMINATORS);
      fix_loop_structure (NULL);
    }
  update_ssa (TODO_update_ssa);

  /* After partial cloning we need to rescale frequencies, so they are
     within proper range in the cloned function.  */
  if (new_entry)
    {
      struct cgraph_edge *e;
      rebuild_frequencies ();

      new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
      for (e = new_version_node->callees; e; e = e->next_callee)
	{
	  basic_block bb = gimple_bb (e->call_stmt);
	  e->count = bb->count;
	}
      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
	{
	  basic_block bb = gimple_bb (e->call_stmt);
	  e->count = bb->count;
	}
    }

  if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
    {
      tree parm;
      vec<tree, va_gc> **debug_args = NULL;
      unsigned int len = 0;
      for (parm = DECL_ARGUMENTS (old_decl), i = 0;
	   parm; parm = DECL_CHAIN (parm), i++)
	if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
	  {
	    tree ddecl;

	    if (debug_args == NULL)
	      {
		debug_args = decl_debug_args_insert (new_decl);
		len = vec_safe_length (*debug_args);
	      }
	    ddecl = make_node (DEBUG_EXPR_DECL);
	    DECL_ARTIFICIAL (ddecl) = 1;
	    TREE_TYPE (ddecl) = TREE_TYPE (parm);
	    SET_DECL_MODE (ddecl, DECL_MODE (parm));
	    vec_safe_push (*debug_args, DECL_ORIGIN (parm));
	    vec_safe_push (*debug_args, ddecl);
	  }
      if (debug_args != NULL)
	{
	  /* On the callee side, add
	       DEBUG D#Y s=> parm
	       DEBUG var => D#Y
	     stmts to the first bb where var is a VAR_DECL created for the
	     optimized away parameter in DECL_INITIAL block.  This hints
	     in the debug info that var (whose DECL_ORIGIN is the parm
	     PARM_DECL) is optimized away, but could be looked up at the
	     call site as value of D#X there.  */
	  tree var = vars, vexpr;
	  gimple_stmt_iterator cgsi
	    = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  gimple *def_temp;
	  i = vec_safe_length (*debug_args);
	  do
	    {
	      i -= 2;
	      while (var != NULL_TREE
		     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
		var = TREE_CHAIN (var);
	      if (var == NULL_TREE)
		break;
	      vexpr = make_node (DEBUG_EXPR_DECL);
	      parm = (**debug_args)[i];
	      DECL_ARTIFICIAL (vexpr) = 1;
	      TREE_TYPE (vexpr) = TREE_TYPE (parm);
	      SET_DECL_MODE (vexpr, DECL_MODE (parm));
	      def_temp = gimple_build_debug_bind (var, vexpr, NULL);
	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
	      def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
	    }
	  while (i > len);
	}
    }

  if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
    BITMAP_FREE (debug_args_to_skip);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts.exists ());
  pop_cfun ();
}
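
#if 0
/* Illustrative sketch only, not part of GCC: produce a version of
   OLD_DECL that drops its first parameter.  Real callers (the clone
   materialization code in cgraphclones.c) also create NEW_DECL and do
   the cgraph bookkeeping, all elided here; the enclosing function is
   hypothetical.  */

static void
example_version_without_first_parm (tree old_decl, tree new_decl)
{
  bitmap args_to_skip = BITMAP_ALLOC (NULL);
  bitmap_set_bit (args_to_skip, 0);	/* Drop parameter number 0.  */
  tree_function_versioning (old_decl, new_decl,
			    /*tree_map=*/NULL, /*update_clones=*/false,
			    args_to_skip, /*skip_return=*/false,
			    /*blocks_to_copy=*/NULL, /*new_entry=*/NULL);
  BITMAP_FREE (args_to_skip);
}
#endif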

/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;
      hash_map<tree, tree> decl_map;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
	   param;
	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
	decl_map.put (param, arg);

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = &decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_parameter = true;
      id.transform_lang_insert_block = NULL;

      /* Make sure not to unshare trees behind the front-end's back
	 since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);

      /* We can only return something suitable for use in a GENERIC
	 expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
	return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}
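
/* Example (illustrative): for

     __attribute__ ((const)) inline int sq (int x) { return x * x; }

   a front end holding the GENERIC CALL_EXPR "sq (3)" while
   DECL_SAVED_TREE (sq) is still available can replace the call by the
   right-hand side of the returned MODIFY_EXPR, effectively "3 * 3".  */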

/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;

  TYPE_CANONICAL (type) = type;

  return type;
}

/* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
   parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
   evaluation.  */

tree
copy_fn (tree fn, tree& parms, tree& result)
{
  copy_body_data id;
  tree param;
  hash_map<tree, tree> decl_map;

  tree *p = &parms;
  *p = NULL_TREE;

  memset (&id, 0, sizeof (id));
  id.src_fn = fn;
  id.dst_fn = current_function_decl;
  id.src_cfun = DECL_STRUCT_FUNCTION (fn);
  id.decl_map = &decl_map;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_parameter = true;
  id.transform_lang_insert_block = NULL;

  /* Make sure not to unshare trees behind the front-end's back
     since front-end specific mechanisms may rely on sharing.  */
  id.regimplify = false;
  id.do_not_unshare = true;

  /* We're not inside any EH region.  */
  id.eh_lp_nr = 0;

  /* Remap the parameters and result and return them to the caller.  */
  for (param = DECL_ARGUMENTS (fn);
       param;
       param = DECL_CHAIN (param))
    {
      *p = remap_decl (param, &id);
      p = &DECL_CHAIN (*p);
    }

  if (DECL_RESULT (fn))
    result = remap_decl (DECL_RESULT (fn), &id);
  else
    result = NULL_TREE;

  return copy_tree_body (&id);
}
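
#if 0
/* Illustrative sketch only, not part of GCC: how a constexpr-style
   evaluator might use copy_fn.  The enclosing function is
   hypothetical.  */

static tree
example_unshare_for_evaluation (tree fn)
{
  tree parms, result;
  tree body = copy_fn (fn, parms, result);
  /* PARMS now holds the remapped PARM_DECLs, to be bound to argument
     values; RESULT is the remapped RESULT_DECL; BODY is an unshared
     copy of DECL_SAVED_TREE (fn) that is safe to evaluate and
     modify.  */
  return body;
}
#endif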