gcc/tree-inline.c (official-gcc.git)
1 /* Tree inlining.
2 Copyright (C) 2001-2020 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "stringpool.h"
60 #include "attribs.h"
61 #include "sreal.h"
62 #include "tree-cfgcleanup.h"
63 #include "tree-ssa-live.h"
64 #include "alloc-pool.h"
65 #include "symbol-summary.h"
66 #include "symtab-thunks.h"
67 #include "symtab-clones.h"
69 /* I'm not real happy about this, but we need to handle gimple and
70 non-gimple trees. */
72 /* Inlining, Cloning, Versioning, Parallelization
74 Inlining: a function body is duplicated, but the PARM_DECLs are
75 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
76 MODIFY_EXPRs that store to a dedicated returned-value variable.
77 The duplicated eh_region info of the copy will later be appended
78 to the info for the caller; the eh_region info in copied throwing
79       statements and RESX statements is adjusted accordingly.
81 Cloning: (only in C++) We have one body for a con/de/structor, and
82 multiple function decls, each with a unique parameter list.
83 Duplicate the body, using the given splay tree; some parameters
84 will become constants (like 0 or 1).
86    Versioning: a function body is duplicated and the result is a new
87    function, rather than being inserted into the blocks of an existing
88    function as with inlining.  Some parameters will become constants.
90 Parallelization: a region of a function is duplicated resulting in
91 a new function. Variables may be replaced with complex expressions
92 to enable shared variable semantics.
94    All of these will simultaneously look up any callgraph edges.  If
95    we're going to inline the duplicated function body, and the given
96    function has some cloned callgraph nodes (one for each place this
97    function will be inlined), those callgraph edges will be duplicated.
98 If we're cloning the body, those callgraph edges will be
99 updated to point into the new body. (Note that the original
100 callgraph node and edge list will not be altered.)
102 See the CALL_EXPR handling case in copy_tree_body_r (). */
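/* A minimal hand-written sketch (not part of GCC) of the source-level effect
   of the inlining transformation described above: the callee's PARM_DECL
   becomes a local variable, and each RETURN becomes an assignment to a
   dedicated returned-value variable.  The names callee, caller_before,
   caller_after and retval_tmp are illustrative only.  */

#if 0 /* Illustrative sketch only; not compiled as part of this file.  */
static int
callee (int x)
{
  if (x > 0)
    return x * 2;
  return -x;
}

static int
caller_before (int a)
{
  return callee (a + 1);
}

/* Conceptually, after inlining callee into the caller:  */
static int
caller_after (int a)
{
  int x = a + 1;       /* The PARM_DECL remapped to a local VAR_DECL.  */
  int retval_tmp;      /* The dedicated returned-value variable.  */
  if (x > 0)
    retval_tmp = x * 2;   /* RETURN_EXPR turned into a MODIFY_EXPR.  */
  else
    retval_tmp = -x;
  return retval_tmp;
}
#endif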
104 /* To Do:
106 o In order to make inlining-on-trees work, we pessimized
107 function-local static constants. In particular, they are now
108 always output, even when not addressed. Fix this by treating
109 function-local static constants just like global static
110 constants; the back-end already knows not to output them if they
111 are not needed.
113 o Provide heuristics to clamp inlining of recursive template
114 calls? */
117 /* Weights that estimate_num_insns uses to estimate the size of the
118 produced code. */
120 eni_weights eni_size_weights;
122 /* Weights that estimate_num_insns uses to estimate the time necessary
123 to execute the produced code. */
125 eni_weights eni_time_weights;
127 /* Prototypes. */
129 static tree declare_return_variable (copy_body_data *, tree, tree,
130 basic_block);
131 static void remap_block (tree *, copy_body_data *);
132 static void copy_bind_expr (tree *, int *, copy_body_data *);
133 static void declare_inline_vars (tree, tree);
134 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
135 static void prepend_lexical_block (tree current_block, tree new_block);
136 static tree copy_result_decl_to_var (tree, copy_body_data *);
137 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
138 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
139 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
141 /* Insert a tree->tree mapping for ID.  Although the name suggests
142    that the trees should be variables, this is used for more than that.  */
144 void
145 insert_decl_map (copy_body_data *id, tree key, tree value)
147 id->decl_map->put (key, value);
149 /* Always insert an identity map as well. If we see this same new
150 node again, we won't want to duplicate it a second time. */
151 if (key != value)
152 id->decl_map->put (value, value);
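/* A minimal standalone sketch (toy arrays instead of GCC's hash_map) of the
   identity-mapping trick used by insert_decl_map above: mapping the new node
   to itself makes remapping idempotent, so a node we created is never
   duplicated a second time.  All names below are illustrative only.  */

#if 0 /* Illustrative sketch only; not GCC's hash_map API.  */
#include <assert.h>

/* A toy stand-in for id->decl_map: parallel key/value arrays.  */
static int map_keys[16], map_vals[16];
static int map_len;

static void
insert_mapping (int key, int value)
{
  map_keys[map_len] = key;
  map_vals[map_len] = value;
  map_len++;
  /* Identity entry: looking up the new node later yields itself,
     so it is never duplicated a second time.  */
  if (key != value)
    {
      map_keys[map_len] = value;
      map_vals[map_len] = value;
      map_len++;
    }
}

static int
remap (int node)
{
  for (int i = 0; i < map_len; i++)
    if (map_keys[i] == node)
      return map_vals[i];
  return node;
}

int
main (void)
{
  insert_mapping (1, 2);
  assert (remap (1) == 2);
  /* Remapping the already-remapped node is a no-op.  */
  assert (remap (remap (1)) == 2);
  return 0;
}
#endif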
155 /* Insert a tree->tree mapping for ID. This is only used for
156 variables. */
158 static void
159 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
161 if (!gimple_in_ssa_p (id->src_cfun))
162 return;
164 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
165 return;
167 if (!target_for_debug_bind (key))
168 return;
170 gcc_assert (TREE_CODE (key) == PARM_DECL);
171 gcc_assert (VAR_P (value));
173 if (!id->debug_map)
174 id->debug_map = new hash_map<tree, tree>;
176 id->debug_map->put (key, value);
179 /* If nonzero, we're remapping the contents of inlined debug
180 statements. If negative, an error has occurred, such as a
181 reference to a variable that isn't available in the inlined
182 context. */
183 static int processing_debug_stmt = 0;
185 /* Construct new SSA name for old NAME. ID is the inline context. */
187 static tree
188 remap_ssa_name (tree name, copy_body_data *id)
190 tree new_tree, var;
191 tree *n;
193 gcc_assert (TREE_CODE (name) == SSA_NAME);
195 n = id->decl_map->get (name);
196 if (n)
198       /* When we perform edge redirection as part of CFG copy, IPA-SRA can
199 	 remove an unused LHS from a call statement.  Such an LHS can however
200 	 still appear in debug statements, but its value is lost in this
201 	 function and we do not want to map it.  */
202 if (id->killed_new_ssa_names
203 && id->killed_new_ssa_names->contains (*n))
205 gcc_assert (processing_debug_stmt);
206 processing_debug_stmt = -1;
207 return name;
210 return unshare_expr (*n);
213 if (processing_debug_stmt)
215 if (SSA_NAME_IS_DEFAULT_DEF (name)
216 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
217 && id->entry_bb == NULL
218 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
220 tree vexpr = make_node (DEBUG_EXPR_DECL);
221 gimple *def_temp;
222 gimple_stmt_iterator gsi;
223 tree val = SSA_NAME_VAR (name);
225 n = id->decl_map->get (val);
226 if (n != NULL)
227 val = *n;
228 if (TREE_CODE (val) != PARM_DECL
229 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
231 processing_debug_stmt = -1;
232 return name;
234 n = id->decl_map->get (val);
235 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
236 return *n;
237 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
238 DECL_ARTIFICIAL (vexpr) = 1;
239 TREE_TYPE (vexpr) = TREE_TYPE (name);
240 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
241 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
242 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
243 insert_decl_map (id, val, vexpr);
244 return vexpr;
247 processing_debug_stmt = -1;
248 return name;
251 /* Remap anonymous SSA names or SSA names of anonymous decls. */
252 var = SSA_NAME_VAR (name);
253 if (!var
254 || (!SSA_NAME_IS_DEFAULT_DEF (name)
255 && VAR_P (var)
256 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
257 && DECL_ARTIFICIAL (var)
258 && DECL_IGNORED_P (var)
259 && !DECL_NAME (var)))
261 struct ptr_info_def *pi;
262 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
263 if (!var && SSA_NAME_IDENTIFIER (name))
264 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
265 insert_decl_map (id, name, new_tree);
266 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
267 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
268 /* At least IPA points-to info can be directly transferred. */
269 if (id->src_cfun->gimple_df
270 && id->src_cfun->gimple_df->ipa_pta
271 && POINTER_TYPE_P (TREE_TYPE (name))
272 && (pi = SSA_NAME_PTR_INFO (name))
273 && !pi->pt.anything)
275 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
276 new_pi->pt = pi->pt;
278 /* So can range-info. */
279 if (!POINTER_TYPE_P (TREE_TYPE (name))
280 && SSA_NAME_RANGE_INFO (name))
281 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
282 SSA_NAME_RANGE_INFO (name));
283 return new_tree;
286   /* Do not set DEF_STMT yet, as the statement is not copied yet.  We do
287      that in copy_bb.  */
288 new_tree = remap_decl (var, id);
290   /* We might've substituted a constant or another SSA_NAME for
291      the variable.
293      Replace the SSA name representing the RESULT_DECL by the variable during
294      inlining: this saves us from the need to introduce a PHI node in the case
295      where the return value is only partly initialized.  */
296 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
297 && (!SSA_NAME_VAR (name)
298 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
299 || !id->transform_return_to_modify))
301 struct ptr_info_def *pi;
302 new_tree = make_ssa_name (new_tree);
303 insert_decl_map (id, name, new_tree);
304 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
305 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
306 /* At least IPA points-to info can be directly transferred. */
307 if (id->src_cfun->gimple_df
308 && id->src_cfun->gimple_df->ipa_pta
309 && POINTER_TYPE_P (TREE_TYPE (name))
310 && (pi = SSA_NAME_PTR_INFO (name))
311 && !pi->pt.anything)
313 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
314 new_pi->pt = pi->pt;
316 /* So can range-info. */
317 if (!POINTER_TYPE_P (TREE_TYPE (name))
318 && SSA_NAME_RANGE_INFO (name))
319 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
320 SSA_NAME_RANGE_INFO (name));
321 if (SSA_NAME_IS_DEFAULT_DEF (name))
323 	  /* By inlining a function having an uninitialized variable, we might
324 	     extend its lifetime (the variable might get reused).  This causes an
325 	     ICE in the case where we end up extending the lifetime of an SSA name
326 	     across an abnormal edge, and it also increases register pressure.
328 	     We simply initialize all uninitialized vars to 0, except
329 	     for the case where we are inlining into the very first BB.  We can avoid
330 	     this for all BBs that are not inside strongly connected
331 	     regions of the CFG, but this is expensive to test.  */
332 if (id->entry_bb
333 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
334 && (!SSA_NAME_VAR (name)
335 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
336 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
337 0)->dest
338 || EDGE_COUNT (id->entry_bb->preds) != 1))
340 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
341 gimple *init_stmt;
342 tree zero = build_zero_cst (TREE_TYPE (new_tree));
344 init_stmt = gimple_build_assign (new_tree, zero);
345 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
346 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
348 else
350 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
351 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
355 else
356 insert_decl_map (id, name, new_tree);
357 return new_tree;
360 /* Remap DECL during the copying of the BLOCK tree for the function. */
362 tree
363 remap_decl (tree decl, copy_body_data *id)
365 tree *n;
367 /* We only remap local variables in the current function. */
369 /* See if we have remapped this declaration. */
371 n = id->decl_map->get (decl);
373 if (!n && processing_debug_stmt)
375 processing_debug_stmt = -1;
376 return decl;
379 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
380 necessary DECLs have already been remapped and we do not want to duplicate
381 a decl coming from outside of the sequence we are copying. */
382 if (!n
383 && id->prevent_decl_creation_for_types
384 && id->remapping_type_depth > 0
385 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
386 return decl;
388 /* If we didn't already have an equivalent for this declaration, create one
389 now. */
390 if (!n)
392 /* Make a copy of the variable or label. */
393 tree t = id->copy_decl (decl, id);
395 /* Remember it, so that if we encounter this local entity again
396 we can reuse this copy. Do this early because remap_type may
397 need this decl for TYPE_STUB_DECL. */
398 insert_decl_map (id, decl, t);
400 if (!DECL_P (t))
401 return t;
403 /* Remap types, if necessary. */
404 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
405 if (TREE_CODE (t) == TYPE_DECL)
407 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
409 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
410 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
411 is not set on the TYPE_DECL, for example in LTO mode. */
412 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
414 tree x = build_variant_type_copy (TREE_TYPE (t));
415 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
416 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
417 DECL_ORIGINAL_TYPE (t) = x;
421 /* Remap sizes as necessary. */
422 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
423 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
425 /* If fields, do likewise for offset and qualifier. */
426 if (TREE_CODE (t) == FIELD_DECL)
428 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
429 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
430 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
433 return t;
436 if (id->do_not_unshare)
437 return *n;
438 else
439 return unshare_expr (*n);
442 static tree
443 remap_type_1 (tree type, copy_body_data *id)
445 tree new_tree, t;
447   /* We do need a copy.  Build and register it now.  If this is a pointer or
448 reference type, remap the designated type and make a new pointer or
449 reference type. */
450 if (TREE_CODE (type) == POINTER_TYPE)
452 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
453 TYPE_MODE (type),
454 TYPE_REF_CAN_ALIAS_ALL (type));
455 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
456 new_tree = build_type_attribute_qual_variant (new_tree,
457 TYPE_ATTRIBUTES (type),
458 TYPE_QUALS (type));
459 insert_decl_map (id, type, new_tree);
460 return new_tree;
462 else if (TREE_CODE (type) == REFERENCE_TYPE)
464 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
465 TYPE_MODE (type),
466 TYPE_REF_CAN_ALIAS_ALL (type));
467 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
468 new_tree = build_type_attribute_qual_variant (new_tree,
469 TYPE_ATTRIBUTES (type),
470 TYPE_QUALS (type));
471 insert_decl_map (id, type, new_tree);
472 return new_tree;
474 else
475 new_tree = copy_node (type);
477 insert_decl_map (id, type, new_tree);
479 /* This is a new type, not a copy of an old type. Need to reassociate
480 variants. We can handle everything except the main variant lazily. */
481 t = TYPE_MAIN_VARIANT (type);
482 if (type != t)
484 t = remap_type (t, id);
485 TYPE_MAIN_VARIANT (new_tree) = t;
486 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
487 TYPE_NEXT_VARIANT (t) = new_tree;
489 else
491 TYPE_MAIN_VARIANT (new_tree) = new_tree;
492 TYPE_NEXT_VARIANT (new_tree) = NULL;
495 if (TYPE_STUB_DECL (type))
496 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
498 /* Lazily create pointer and reference types. */
499 TYPE_POINTER_TO (new_tree) = NULL;
500 TYPE_REFERENCE_TO (new_tree) = NULL;
502   /* Copy all types that may contain references to local variables; be sure to
503      preserve sharing between the type and its main variant when possible.  */
504 switch (TREE_CODE (new_tree))
506 case INTEGER_TYPE:
507 case REAL_TYPE:
508 case FIXED_POINT_TYPE:
509 case ENUMERAL_TYPE:
510 case BOOLEAN_TYPE:
511 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
513 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
514 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
516 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
517 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
519 else
521 t = TYPE_MIN_VALUE (new_tree);
522 if (t && TREE_CODE (t) != INTEGER_CST)
523 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
525 t = TYPE_MAX_VALUE (new_tree);
526 if (t && TREE_CODE (t) != INTEGER_CST)
527 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
529 return new_tree;
531 case FUNCTION_TYPE:
532 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
533 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
534 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
535 else
536 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
537 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
538 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
539 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
540 else
541 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
542 return new_tree;
544 case ARRAY_TYPE:
545 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
546 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
547 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
548 else
549 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
551 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
553 gcc_checking_assert (TYPE_DOMAIN (type)
554 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
555 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
557 else
559 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
560 	  /* For array bounds where we have decided not to copy over the bounds
561 	     variable, which isn't used in the OpenMP/OpenACC region, change them to
562 	     an uninitialized VAR_DECL temporary.  */
563 if (id->adjust_array_error_bounds
564 && TYPE_DOMAIN (new_tree)
565 && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
566 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
568 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
569 DECL_ATTRIBUTES (v)
570 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
571 DECL_ATTRIBUTES (v));
572 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
575 break;
577 case RECORD_TYPE:
578 case UNION_TYPE:
579 case QUAL_UNION_TYPE:
580 if (TYPE_MAIN_VARIANT (type) != type
581 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
582 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
583 else
585 tree f, nf = NULL;
587 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
589 t = remap_decl (f, id);
590 DECL_CONTEXT (t) = new_tree;
591 DECL_CHAIN (t) = nf;
592 nf = t;
594 TYPE_FIELDS (new_tree) = nreverse (nf);
596 break;
598 case OFFSET_TYPE:
599 default:
600 /* Shouldn't have been thought variable sized. */
601 gcc_unreachable ();
604   /* All variants of a type share the same size, so use the already remapped data.  */
605 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
607 tree s = TYPE_SIZE (type);
608 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
609 tree su = TYPE_SIZE_UNIT (type);
610 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
611 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
612 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
613 || s == mvs);
614 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
615 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
616 || su == mvsu);
617 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
618 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
620 else
622 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
623 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
626 return new_tree;
629 /* Helper function for remap_type_2, called through walk_tree. */
631 static tree
632 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
634 copy_body_data *id = (copy_body_data *) data;
636 if (TYPE_P (*tp))
637 *walk_subtrees = 0;
639 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
640 return *tp;
642 return NULL_TREE;
645 /* Return true if TYPE needs to be remapped because remap_decl on any
646 needed embedded decl returns something other than that decl. */
648 static bool
649 remap_type_2 (tree type, copy_body_data *id)
651 tree t;
653 #define RETURN_TRUE_IF_VAR(T) \
654 do \
656 tree _t = (T); \
657 if (_t) \
659 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
660 return true; \
661 if (!TYPE_SIZES_GIMPLIFIED (type) \
662 && walk_tree (&_t, remap_type_3, id, NULL)) \
663 return true; \
666 while (0)
668 switch (TREE_CODE (type))
670 case POINTER_TYPE:
671 case REFERENCE_TYPE:
672 case FUNCTION_TYPE:
673 case METHOD_TYPE:
674 return remap_type_2 (TREE_TYPE (type), id);
676 case INTEGER_TYPE:
677 case REAL_TYPE:
678 case FIXED_POINT_TYPE:
679 case ENUMERAL_TYPE:
680 case BOOLEAN_TYPE:
681 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
682 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
683 return false;
685 case ARRAY_TYPE:
686 if (remap_type_2 (TREE_TYPE (type), id)
687 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
688 return true;
689 break;
691 case RECORD_TYPE:
692 case UNION_TYPE:
693 case QUAL_UNION_TYPE:
694 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
695 if (TREE_CODE (t) == FIELD_DECL)
697 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
698 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
699 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
700 if (TREE_CODE (type) == QUAL_UNION_TYPE)
701 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
703 break;
705 default:
706 return false;
709 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
710 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
711 return false;
712 #undef RETURN_TRUE_IF_VAR
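/* The RETURN_TRUE_IF_VAR macro above is wrapped in do { ... } while (0) so
   that its multi-statement expansion behaves as a single statement.  A small
   standalone sketch of the idiom (the macro and names are illustrative, not
   part of GCC):  */

#if 0 /* Illustrative sketch only.  */
#include <stdio.h>

/* Without the do/while (0), the two statements would escape the `if'
   below and the trailing semicolon would break the `if'/`else' pair.  */
#define LOG_AND_BUMP(counter) \
  do \
    { \
      printf ("bumping\n"); \
      (counter)++; \
    } \
  while (0)

int
main (void)
{
  int n = 0;
  if (n == 0)
    LOG_AND_BUMP (n);   /* Expands to a single statement.  */
  else
    printf ("never\n");
  return n == 1 ? 0 : 1;
}
#endif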
715 tree
716 remap_type (tree type, copy_body_data *id)
718 tree *node;
719 tree tmp;
721 if (type == NULL)
722 return type;
724 /* See if we have remapped this type. */
725 node = id->decl_map->get (type);
726 if (node)
727 return *node;
729 /* The type only needs remapping if it's variably modified. */
730 if (! variably_modified_type_p (type, id->src_fn)
731       /* Don't remap if the copy_decl method doesn't always return a new
732 	 decl and, for all embedded decls, returns the passed-in decl.  */
733 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
735 insert_decl_map (id, type, type);
736 return type;
739 id->remapping_type_depth++;
740 tmp = remap_type_1 (type, id);
741 id->remapping_type_depth--;
743 return tmp;
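/* remap_type above only rewrites variably modified types, i.e. types whose
   size refers to a local entity.  A hand-written, C99-style sketch of such a
   type (illustrative only): the type of `buf' mentions the parameter `n', so
   it must be remapped when this function is inlined, whereas a fixed-size
   `int buf[16]' would not.  */

#if 0 /* Illustrative sketch only.  */
static int
sum_first_n (int n)
{
  int buf[n];     /* Variably modified type: its size mentions `n'.  */
  int total = 0;
  for (int i = 0; i < n; i++)
    {
      buf[i] = i;
      total += buf[i];
    }
  return total;
}
#endif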
746 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
748 static bool
749 can_be_nonlocal (tree decl, copy_body_data *id)
751 /* We cannot duplicate function decls. */
752 if (TREE_CODE (decl) == FUNCTION_DECL)
753 return true;
755 /* Local static vars must be non-local or we get multiple declaration
756 problems. */
757 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
758 return true;
760 return false;
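/* can_be_nonlocal above keeps FUNCTION_DECLs and non-automatic variables
   (e.g. function-local statics) shared instead of duplicating them.  A small
   hand-written sketch of why (illustrative names): however many call sites
   the function is inlined into, there must be exactly one `counter'.  */

#if 0 /* Illustrative sketch only.  */
static int
next_ticket (void)
{
  static int counter;  /* One object, never remapped, however many times
			  next_ticket is inlined.  */
  return ++counter;
}

int
two_tickets (void)
{
  /* Both calls may be inlined, yet they still share `counter',
     so this yields 1 + 2 = 3.  */
  return next_ticket () + next_ticket ();
}
#endif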
763 static tree
764 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
765 copy_body_data *id)
767 tree old_var;
768 tree new_decls = NULL_TREE;
770 /* Remap its variables. */
771 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
773 tree new_var;
775 if (can_be_nonlocal (old_var, id))
777 /* We need to add this variable to the local decls as otherwise
778 nothing else will do so. */
779 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
780 add_local_decl (cfun, old_var);
781 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
782 && !DECL_IGNORED_P (old_var)
783 && nonlocalized_list)
784 vec_safe_push (*nonlocalized_list, old_var);
785 continue;
788 /* Remap the variable. */
789 new_var = remap_decl (old_var, id);
791 /* If we didn't remap this variable, we can't mess with its
792 TREE_CHAIN. If we remapped this variable to the return slot, it's
793 already declared somewhere else, so don't declare it here. */
795 if (new_var == id->retvar)
797 else if (!new_var)
799 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
800 && !DECL_IGNORED_P (old_var)
801 && nonlocalized_list)
802 vec_safe_push (*nonlocalized_list, old_var);
804 else
806 gcc_assert (DECL_P (new_var));
807 DECL_CHAIN (new_var) = new_decls;
808 new_decls = new_var;
810 /* Also copy value-expressions. */
811 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
813 tree tem = DECL_VALUE_EXPR (new_var);
814 bool old_regimplify = id->regimplify;
815 id->remapping_type_depth++;
816 walk_tree (&tem, copy_tree_body_r, id, NULL);
817 id->remapping_type_depth--;
818 id->regimplify = old_regimplify;
819 SET_DECL_VALUE_EXPR (new_var, tem);
824 return nreverse (new_decls);
827 /* Copy the BLOCK to contain remapped versions of the variables
828 therein. And hook the new block into the block-tree. */
830 static void
831 remap_block (tree *block, copy_body_data *id)
833 tree old_block;
834 tree new_block;
836 /* Make the new block. */
837 old_block = *block;
838 new_block = make_node (BLOCK);
839 TREE_USED (new_block) = TREE_USED (old_block);
840 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
841 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
842 BLOCK_NONLOCALIZED_VARS (new_block)
843 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
844 *block = new_block;
846 /* Remap its variables. */
847 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
848 &BLOCK_NONLOCALIZED_VARS (new_block),
849 id);
851 if (id->transform_lang_insert_block)
852 id->transform_lang_insert_block (new_block);
854 /* Remember the remapped block. */
855 insert_decl_map (id, old_block, new_block);
858 /* Copy the whole block tree and root it in id->block. */
860 static tree
861 remap_blocks (tree block, copy_body_data *id)
863 tree t;
864 tree new_tree = block;
866 if (!block)
867 return NULL;
869 remap_block (&new_tree, id);
870 gcc_assert (new_tree != block);
871 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
872 prepend_lexical_block (new_tree, remap_blocks (t, id));
873 /* Blocks are in arbitrary order, but make things slightly prettier and do
874 not swap order when producing a copy. */
875 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
876 return new_tree;
879 /* Remap the block tree rooted at BLOCK to nothing. */
881 static void
882 remap_blocks_to_null (tree block, copy_body_data *id)
884 tree t;
885 insert_decl_map (id, block, NULL_TREE);
886 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
887 remap_blocks_to_null (t, id);
890 /* Remap the location info pointed to by LOCUS. */
892 static location_t
893 remap_location (location_t locus, copy_body_data *id)
895 if (LOCATION_BLOCK (locus))
897 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
898 gcc_assert (n);
899 if (*n)
900 return set_block (locus, *n);
903 locus = LOCATION_LOCUS (locus);
905 if (locus != UNKNOWN_LOCATION && id->block)
906 return set_block (locus, id->block);
908 return locus;
911 static void
912 copy_statement_list (tree *tp)
914 tree_stmt_iterator oi, ni;
915 tree new_tree;
917 new_tree = alloc_stmt_list ();
918 ni = tsi_start (new_tree);
919 oi = tsi_start (*tp);
920 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
921 *tp = new_tree;
923 for (; !tsi_end_p (oi); tsi_next (&oi))
925 tree stmt = tsi_stmt (oi);
926 if (TREE_CODE (stmt) == STATEMENT_LIST)
927 /* This copy is not redundant; tsi_link_after will smash this
928 STATEMENT_LIST into the end of the one we're building, and we
929 don't want to do that with the original. */
930 copy_statement_list (&stmt);
931 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
935 static void
936 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
938 tree block = BIND_EXPR_BLOCK (*tp);
939 /* Copy (and replace) the statement. */
940 copy_tree_r (tp, walk_subtrees, NULL);
941 if (block)
943 remap_block (&block, id);
944 BIND_EXPR_BLOCK (*tp) = block;
947 if (BIND_EXPR_VARS (*tp))
948 /* This will remap a lot of the same decls again, but this should be
949 harmless. */
950 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
954 /* Create a new gimple_seq by remapping all the statements in BODY
955 using the inlining information in ID. */
957 static gimple_seq
958 remap_gimple_seq (gimple_seq body, copy_body_data *id)
960 gimple_stmt_iterator si;
961 gimple_seq new_body = NULL;
963 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
965 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
966 gimple_seq_add_seq (&new_body, new_stmts);
969 return new_body;
973 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
974 block using the mapping information in ID. */
976 static gimple *
977 copy_gimple_bind (gbind *stmt, copy_body_data *id)
979 gimple *new_bind;
980 tree new_block, new_vars;
981 gimple_seq body, new_body;
983 /* Copy the statement. Note that we purposely don't use copy_stmt
984 here because we need to remap statements as we copy. */
985 body = gimple_bind_body (stmt);
986 new_body = remap_gimple_seq (body, id);
988 new_block = gimple_bind_block (stmt);
989 if (new_block)
990 remap_block (&new_block, id);
992 /* This will remap a lot of the same decls again, but this should be
993 harmless. */
994 new_vars = gimple_bind_vars (stmt);
995 if (new_vars)
996 new_vars = remap_decls (new_vars, NULL, id);
998 new_bind = gimple_build_bind (new_vars, new_body, new_block);
1000 return new_bind;
1003 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
1005 static bool
1006 is_parm (tree decl)
1008 if (TREE_CODE (decl) == SSA_NAME)
1010 decl = SSA_NAME_VAR (decl);
1011 if (!decl)
1012 return false;
1015 return (TREE_CODE (decl) == PARM_DECL);
1018 /* Remap the dependence CLIQUE from the source to the destination function
1019 as specified in ID. */
1021 static unsigned short
1022 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1024 if (clique == 0 || processing_debug_stmt)
1025 return 0;
1026 if (!id->dependence_map)
1027 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1028 bool existed;
1029 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1030 if (!existed)
1032 /* Clique 1 is reserved for local ones set by PTA. */
1033 if (cfun->last_clique == 0)
1034 cfun->last_clique = 1;
1035 newc = ++cfun->last_clique;
1037 return newc;
1040 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1041 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1042    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
1043    recursing into the child nodes of *TP.  */
1045 static tree
1046 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1048 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1049 copy_body_data *id = (copy_body_data *) wi_p->info;
1050 tree fn = id->src_fn;
1052 /* For recursive invocations this is no longer the LHS itself. */
1053 bool is_lhs = wi_p->is_lhs;
1054 wi_p->is_lhs = false;
1056 if (TREE_CODE (*tp) == SSA_NAME)
1058 *tp = remap_ssa_name (*tp, id);
1059 *walk_subtrees = 0;
1060 if (is_lhs)
1061 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1062 return NULL;
1064 else if (auto_var_in_fn_p (*tp, fn))
1066 /* Local variables and labels need to be replaced by equivalent
1067 variables. We don't want to copy static variables; there's
1068 only one of those, no matter how many times we inline the
1069 containing function. Similarly for globals from an outer
1070 function. */
1071 tree new_decl;
1073 /* Remap the declaration. */
1074 new_decl = remap_decl (*tp, id);
1075 gcc_assert (new_decl);
1076 /* Replace this variable with the copy. */
1077 STRIP_TYPE_NOPS (new_decl);
1078 /* ??? The C++ frontend uses void * pointer zero to initialize
1079 any other type. This confuses the middle-end type verification.
1080 	 As cloned bodies do not go through gimplification again, the fixup
1081 there doesn't trigger. */
1082 if (TREE_CODE (new_decl) == INTEGER_CST
1083 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1084 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1085 *tp = new_decl;
1086 *walk_subtrees = 0;
1088 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1089 gcc_unreachable ();
1090 else if (TREE_CODE (*tp) == SAVE_EXPR)
1091 gcc_unreachable ();
1092 else if (TREE_CODE (*tp) == LABEL_DECL
1093 && (!DECL_CONTEXT (*tp)
1094 || decl_function_context (*tp) == id->src_fn))
1095 /* These may need to be remapped for EH handling. */
1096 *tp = remap_decl (*tp, id);
1097 else if (TREE_CODE (*tp) == FIELD_DECL)
1099 /* If the enclosing record type is variably_modified_type_p, the field
1100 has already been remapped. Otherwise, it need not be. */
1101 tree *n = id->decl_map->get (*tp);
1102 if (n)
1103 *tp = *n;
1104 *walk_subtrees = 0;
1106 else if (TYPE_P (*tp))
1107 /* Types may need remapping as well. */
1108 *tp = remap_type (*tp, id);
1109 else if (CONSTANT_CLASS_P (*tp))
1111 /* If this is a constant, we have to copy the node iff the type
1112 will be remapped. copy_tree_r will not copy a constant. */
1113 tree new_type = remap_type (TREE_TYPE (*tp), id);
1115 if (new_type == TREE_TYPE (*tp))
1116 *walk_subtrees = 0;
1118 else if (TREE_CODE (*tp) == INTEGER_CST)
1119 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1120 else
1122 *tp = copy_node (*tp);
1123 TREE_TYPE (*tp) = new_type;
1126 else
1128 /* Otherwise, just copy the node. Note that copy_tree_r already
1129 knows not to copy VAR_DECLs, etc., so this is safe. */
1131 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1133 /* We need to re-canonicalize MEM_REFs from inline substitutions
1134 that can happen when a pointer argument is an ADDR_EXPR.
1135 Recurse here manually to allow that. */
1136 tree ptr = TREE_OPERAND (*tp, 0);
1137 tree type = remap_type (TREE_TYPE (*tp), id);
1138 tree old = *tp;
1139 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1140 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1141 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1142 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1143 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1144 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1146 MR_DEPENDENCE_CLIQUE (*tp)
1147 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1148 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1150 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1151 remapped a parameter as the property might be valid only
1152 for the parameter itself. */
1153 if (TREE_THIS_NOTRAP (old)
1154 && (!is_parm (TREE_OPERAND (old, 0))
1155 || (!id->transform_parameter && is_parm (ptr))))
1156 TREE_THIS_NOTRAP (*tp) = 1;
1157 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1158 *walk_subtrees = 0;
1159 return NULL;
1162 /* Here is the "usual case". Copy this tree node, and then
1163 tweak some special cases. */
1164 copy_tree_r (tp, walk_subtrees, NULL);
1166 if (TREE_CODE (*tp) != OMP_CLAUSE)
1167 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1169 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1171 /* The copied TARGET_EXPR has never been expanded, even if the
1172 original node was expanded already. */
1173 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1174 TREE_OPERAND (*tp, 3) = NULL_TREE;
1176 else if (TREE_CODE (*tp) == ADDR_EXPR)
1178 /* Variable substitution need not be simple. In particular,
1179 the MEM_REF substitution above. Make sure that
1180 TREE_CONSTANT and friends are up-to-date. */
1181 int invariant = is_gimple_min_invariant (*tp);
1182 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1183 recompute_tree_invariant_for_addr_expr (*tp);
1185 /* If this used to be invariant, but is not any longer,
1186 then regimplification is probably needed. */
1187 if (invariant && !is_gimple_min_invariant (*tp))
1188 id->regimplify = true;
1190 *walk_subtrees = 0;
1194 /* Update the TREE_BLOCK for the cloned expr. */
1195 if (EXPR_P (*tp))
1197 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1198 tree old_block = TREE_BLOCK (*tp);
1199 if (old_block)
1201 tree *n;
1202 n = id->decl_map->get (TREE_BLOCK (*tp));
1203 if (n)
1204 new_block = *n;
1206 TREE_SET_BLOCK (*tp, new_block);
1209 /* Keep iterating. */
1210 return NULL_TREE;
1214 /* Called from copy_body_id via walk_tree. DATA is really a
1215 `copy_body_data *'. */
1217 tree
1218 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1220 copy_body_data *id = (copy_body_data *) data;
1221 tree fn = id->src_fn;
1222 tree new_block;
1224 /* Begin by recognizing trees that we'll completely rewrite for the
1225 inlining context. Our output for these trees is completely
1226      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1227 into an edge). Further down, we'll handle trees that get
1228 duplicated and/or tweaked. */
1230 /* When requested, RETURN_EXPRs should be transformed to just the
1231 contained MODIFY_EXPR. The branch semantics of the return will
1232 be handled elsewhere by manipulating the CFG rather than a statement. */
1233 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1235 tree assignment = TREE_OPERAND (*tp, 0);
1237 /* If we're returning something, just turn that into an
1238 assignment into the equivalent of the original RESULT_DECL.
1239 If the "assignment" is just the result decl, the result
1240 decl has already been set (e.g. a recent "foo (&result_decl,
1241 ...)"); just toss the entire RETURN_EXPR. */
1242 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1244 /* Replace the RETURN_EXPR with (a copy of) the
1245 MODIFY_EXPR hanging underneath. */
1246 *tp = copy_node (assignment);
1248 else /* Else the RETURN_EXPR returns no value. */
1250 *tp = NULL;
1251 return (tree) (void *)1;
1254 else if (TREE_CODE (*tp) == SSA_NAME)
1256 *tp = remap_ssa_name (*tp, id);
1257 *walk_subtrees = 0;
1258 return NULL;
1261 /* Local variables and labels need to be replaced by equivalent
1262 variables. We don't want to copy static variables; there's only
1263 one of those, no matter how many times we inline the containing
1264 function. Similarly for globals from an outer function. */
1265 else if (auto_var_in_fn_p (*tp, fn))
1267 tree new_decl;
1269 /* Remap the declaration. */
1270 new_decl = remap_decl (*tp, id);
1271 gcc_assert (new_decl);
1272 /* Replace this variable with the copy. */
1273 STRIP_TYPE_NOPS (new_decl);
1274 *tp = new_decl;
1275 *walk_subtrees = 0;
1277 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1278 copy_statement_list (tp);
1279 else if (TREE_CODE (*tp) == SAVE_EXPR
1280 || TREE_CODE (*tp) == TARGET_EXPR)
1281 remap_save_expr (tp, id->decl_map, walk_subtrees);
1282 else if (TREE_CODE (*tp) == LABEL_DECL
1283 && (! DECL_CONTEXT (*tp)
1284 || decl_function_context (*tp) == id->src_fn))
1285 /* These may need to be remapped for EH handling. */
1286 *tp = remap_decl (*tp, id);
1287 else if (TREE_CODE (*tp) == BIND_EXPR)
1288 copy_bind_expr (tp, walk_subtrees, id);
1289 /* Types may need remapping as well. */
1290 else if (TYPE_P (*tp))
1291 *tp = remap_type (*tp, id);
1293 /* If this is a constant, we have to copy the node iff the type will be
1294 remapped. copy_tree_r will not copy a constant. */
1295 else if (CONSTANT_CLASS_P (*tp))
1297 tree new_type = remap_type (TREE_TYPE (*tp), id);
1299 if (new_type == TREE_TYPE (*tp))
1300 *walk_subtrees = 0;
1302 else if (TREE_CODE (*tp) == INTEGER_CST)
1303 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1304 else
1306 *tp = copy_node (*tp);
1307 TREE_TYPE (*tp) = new_type;
1311 /* Otherwise, just copy the node. Note that copy_tree_r already
1312 knows not to copy VAR_DECLs, etc., so this is safe. */
1313 else
1315 /* Here we handle trees that are not completely rewritten.
1316 First we detect some inlining-induced bogosities for
1317 discarding. */
1318 if (TREE_CODE (*tp) == MODIFY_EXPR
1319 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1320 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1322 /* Some assignments VAR = VAR; don't generate any rtl code
1323 and thus don't count as variable modification. Avoid
1324 keeping bogosities like 0 = 0. */
1325 tree decl = TREE_OPERAND (*tp, 0), value;
1326 tree *n;
1328 n = id->decl_map->get (decl);
1329 if (n)
1331 value = *n;
1332 STRIP_TYPE_NOPS (value);
1333 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1335 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1336 return copy_tree_body_r (tp, walk_subtrees, data);
1340 else if (TREE_CODE (*tp) == INDIRECT_REF)
1342 /* Get rid of *& from inline substitutions that can happen when a
1343 pointer argument is an ADDR_EXPR. */
1344 tree decl = TREE_OPERAND (*tp, 0);
1345 tree *n = id->decl_map->get (decl);
1346 if (n)
1348 /* If we happen to get an ADDR_EXPR in n->value, strip
1349 it manually here as we'll eventually get ADDR_EXPRs
1350 which lie about their types pointed to. In this case
1351 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1352 but we absolutely rely on that. As fold_indirect_ref
1353 does other useful transformations, try that first, though. */
1354 tree type = TREE_TYPE (*tp);
1355 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1356 tree old = *tp;
1357 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1358 if (! *tp)
1360 type = remap_type (type, id);
1361 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1364 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1365 /* ??? We should either assert here or build
1366 a VIEW_CONVERT_EXPR instead of blindly leaking
1367 incompatible types to our IL. */
1368 if (! *tp)
1369 *tp = TREE_OPERAND (ptr, 0);
1371 else
1373 *tp = build1 (INDIRECT_REF, type, ptr);
1374 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1375 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1376 TREE_READONLY (*tp) = TREE_READONLY (old);
1377 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1378 have remapped a parameter as the property might be
1379 valid only for the parameter itself. */
1380 if (TREE_THIS_NOTRAP (old)
1381 && (!is_parm (TREE_OPERAND (old, 0))
1382 || (!id->transform_parameter && is_parm (ptr))))
1383 TREE_THIS_NOTRAP (*tp) = 1;
1386 *walk_subtrees = 0;
1387 return NULL;
1390 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1392 /* We need to re-canonicalize MEM_REFs from inline substitutions
1393 that can happen when a pointer argument is an ADDR_EXPR.
1394 Recurse here manually to allow that. */
1395 tree ptr = TREE_OPERAND (*tp, 0);
1396 tree type = remap_type (TREE_TYPE (*tp), id);
1397 tree old = *tp;
1398 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1399 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1400 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1401 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1402 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1403 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1405 MR_DEPENDENCE_CLIQUE (*tp)
1406 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1407 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1409 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1410 remapped a parameter as the property might be valid only
1411 for the parameter itself. */
1412 if (TREE_THIS_NOTRAP (old)
1413 && (!is_parm (TREE_OPERAND (old, 0))
1414 || (!id->transform_parameter && is_parm (ptr))))
1415 TREE_THIS_NOTRAP (*tp) = 1;
1416 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1417 *walk_subtrees = 0;
1418 return NULL;
1421 /* Here is the "usual case". Copy this tree node, and then
1422 tweak some special cases. */
1423 copy_tree_r (tp, walk_subtrees, NULL);
1425       /* If EXPR has a block defined, map it to the newly constructed block.
1426 	 When inlining we want EXPRs without a block to appear in the block
1427 	 of the function call if we are not remapping a type.  */
1428 if (EXPR_P (*tp))
1430 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1431 if (TREE_BLOCK (*tp))
1433 tree *n;
1434 n = id->decl_map->get (TREE_BLOCK (*tp));
1435 if (n)
1436 new_block = *n;
1438 TREE_SET_BLOCK (*tp, new_block);
1441 if (TREE_CODE (*tp) != OMP_CLAUSE)
1442 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1444 /* The copied TARGET_EXPR has never been expanded, even if the
1445 original node was expanded already. */
1446 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1448 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1449 TREE_OPERAND (*tp, 3) = NULL_TREE;
1452 /* Variable substitution need not be simple. In particular, the
1453 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1454 and friends are up-to-date. */
1455 else if (TREE_CODE (*tp) == ADDR_EXPR)
1457 int invariant = is_gimple_min_invariant (*tp);
1458 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1460 /* Handle the case where we substituted an INDIRECT_REF
1461 into the operand of the ADDR_EXPR. */
1462 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1463 && !id->do_not_fold)
1465 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1466 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1467 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1468 *tp = t;
1470 else
1471 recompute_tree_invariant_for_addr_expr (*tp);
1473 /* If this used to be invariant, but is not any longer,
1474 then regimplification is probably needed. */
1475 if (invariant && !is_gimple_min_invariant (*tp))
1476 id->regimplify = true;
1478 *walk_subtrees = 0;
1482 /* Keep iterating. */
1483 return NULL_TREE;
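/* Both copy_tree_body_r above and remap_gimple_op_r re-canonicalize the *&
   combinations that appear when a pointer argument was an ADDR_EXPR at the
   call site.  A source-level sketch of the situation (illustrative names):  */

#if 0 /* Illustrative sketch only.  */
static int
load (int *p)
{
  return *p;   /* An INDIRECT_REF / MEM_REF of the parameter.  */
}

int
use (void)
{
  int x = 42;
  /* After inlining, `p' is substituted by `&x', giving `*&x'; the copy
     routines fold that back to plain `x'.  */
  return load (&x);
}
#endif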
1486 /* Helper for remap_gimple_stmt. Given an EH region number for the
1487 source function, map that to the duplicate EH region number in
1488 the destination function. */
1490 static int
1491 remap_eh_region_nr (int old_nr, copy_body_data *id)
1493 eh_region old_r, new_r;
1495 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1496 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1498 return new_r->index;
1501 /* Similar, but operate on INTEGER_CSTs. */
1503 static tree
1504 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1506 int old_nr, new_nr;
1508 old_nr = tree_to_shwi (old_t_nr);
1509 new_nr = remap_eh_region_nr (old_nr, id);
1511 return build_int_cst (integer_type_node, new_nr);
1514 /* Helper for copy_bb. Remap statement STMT using the inlining
1515 information in ID. Return the new statement copy. */
1517 static gimple_seq
1518 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1520 gimple *copy = NULL;
1521 struct walk_stmt_info wi;
1522 bool skip_first = false;
1523 gimple_seq stmts = NULL;
1525 if (is_gimple_debug (stmt)
1526 && (gimple_debug_nonbind_marker_p (stmt)
1527 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1528 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1529 return NULL;
1531 /* Begin by recognizing trees that we'll completely rewrite for the
1532 inlining context. Our output for these trees is completely
1533 different from our input (e.g. RETURN_EXPR is deleted and morphs
1534 into an edge). Further down, we'll handle trees that get
1535 duplicated and/or tweaked. */
1537 /* When requested, GIMPLE_RETURN should be transformed to just the
1538 contained GIMPLE_ASSIGN. The branch semantics of the return will
1539 be handled elsewhere by manipulating the CFG rather than the
1540 statement. */
1541 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1543 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1545 /* If we're returning something, just turn that into an
1546 assignment to the equivalent of the original RESULT_DECL.
1547 If RETVAL is just the result decl, the result decl has
1548 already been set (e.g. a recent "foo (&result_decl, ...)");
1549 just toss the entire GIMPLE_RETURN. Likewise for when the
1550 call doesn't want the return value. */
1551 if (retval
1552 && (TREE_CODE (retval) != RESULT_DECL
1553 && (!id->call_stmt
1554 || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1555 && (TREE_CODE (retval) != SSA_NAME
1556 || ! SSA_NAME_VAR (retval)
1557 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1559 copy = gimple_build_assign (id->do_not_unshare
1560 ? id->retvar : unshare_expr (id->retvar),
1561 retval);
1562 /* id->retvar is already substituted. Skip it on later remapping. */
1563 skip_first = true;
1565 else
1566 return NULL;
1568 else if (gimple_has_substatements (stmt))
1570 gimple_seq s1, s2;
1572 /* When cloning bodies from the C++ front end, we will be handed bodies
1573 in High GIMPLE form. Handle here all the High GIMPLE statements that
1574 have embedded statements. */
1575 switch (gimple_code (stmt))
1577 case GIMPLE_BIND:
1578 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1579 break;
1581 case GIMPLE_CATCH:
1583 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1584 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1585 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1587 break;
1589 case GIMPLE_EH_FILTER:
1590 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1591 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1592 break;
1594 case GIMPLE_TRY:
1595 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1596 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1597 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1598 break;
1600 case GIMPLE_WITH_CLEANUP_EXPR:
1601 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1602 copy = gimple_build_wce (s1);
1603 break;
1605 case GIMPLE_OMP_PARALLEL:
1607 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1608 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1609 copy = gimple_build_omp_parallel
1610 (s1,
1611 gimple_omp_parallel_clauses (omp_par_stmt),
1612 gimple_omp_parallel_child_fn (omp_par_stmt),
1613 gimple_omp_parallel_data_arg (omp_par_stmt));
1615 break;
1617 case GIMPLE_OMP_TASK:
1618 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1619 copy = gimple_build_omp_task
1620 (s1,
1621 gimple_omp_task_clauses (stmt),
1622 gimple_omp_task_child_fn (stmt),
1623 gimple_omp_task_data_arg (stmt),
1624 gimple_omp_task_copy_fn (stmt),
1625 gimple_omp_task_arg_size (stmt),
1626 gimple_omp_task_arg_align (stmt));
1627 break;
1629 case GIMPLE_OMP_FOR:
1630 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1631 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1632 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1633 gimple_omp_for_clauses (stmt),
1634 gimple_omp_for_collapse (stmt), s2);
1636 size_t i;
1637 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1639 gimple_omp_for_set_index (copy, i,
1640 gimple_omp_for_index (stmt, i));
1641 gimple_omp_for_set_initial (copy, i,
1642 gimple_omp_for_initial (stmt, i));
1643 gimple_omp_for_set_final (copy, i,
1644 gimple_omp_for_final (stmt, i));
1645 gimple_omp_for_set_incr (copy, i,
1646 gimple_omp_for_incr (stmt, i));
1647 gimple_omp_for_set_cond (copy, i,
1648 gimple_omp_for_cond (stmt, i));
1651 break;
1653 case GIMPLE_OMP_MASTER:
1654 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1655 copy = gimple_build_omp_master (s1);
1656 break;
1658 case GIMPLE_OMP_TASKGROUP:
1659 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1660 copy = gimple_build_omp_taskgroup
1661 (s1, gimple_omp_taskgroup_clauses (stmt));
1662 break;
1664 case GIMPLE_OMP_ORDERED:
1665 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1666 copy = gimple_build_omp_ordered
1667 (s1,
1668 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1669 break;
1671 case GIMPLE_OMP_SCAN:
1672 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1673 copy = gimple_build_omp_scan
1674 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1675 break;
1677 case GIMPLE_OMP_SECTION:
1678 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1679 copy = gimple_build_omp_section (s1);
1680 break;
1682 case GIMPLE_OMP_SECTIONS:
1683 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1684 copy = gimple_build_omp_sections
1685 (s1, gimple_omp_sections_clauses (stmt));
1686 break;
1688 case GIMPLE_OMP_SINGLE:
1689 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1690 copy = gimple_build_omp_single
1691 (s1, gimple_omp_single_clauses (stmt));
1692 break;
1694 case GIMPLE_OMP_TARGET:
1695 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1696 copy = gimple_build_omp_target
1697 (s1, gimple_omp_target_kind (stmt),
1698 gimple_omp_target_clauses (stmt));
1699 break;
1701 case GIMPLE_OMP_TEAMS:
1702 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1703 copy = gimple_build_omp_teams
1704 (s1, gimple_omp_teams_clauses (stmt));
1705 break;
1707 case GIMPLE_OMP_CRITICAL:
1708 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1709 copy = gimple_build_omp_critical (s1,
1710 gimple_omp_critical_name
1711 (as_a <gomp_critical *> (stmt)),
1712 gimple_omp_critical_clauses
1713 (as_a <gomp_critical *> (stmt)));
1714 break;
1716 case GIMPLE_TRANSACTION:
1718 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1719 gtransaction *new_trans_stmt;
1720 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1721 id);
1722 copy = new_trans_stmt = gimple_build_transaction (s1);
1723 gimple_transaction_set_subcode (new_trans_stmt,
1724 gimple_transaction_subcode (old_trans_stmt));
1725 gimple_transaction_set_label_norm (new_trans_stmt,
1726 gimple_transaction_label_norm (old_trans_stmt));
1727 gimple_transaction_set_label_uninst (new_trans_stmt,
1728 gimple_transaction_label_uninst (old_trans_stmt));
1729 gimple_transaction_set_label_over (new_trans_stmt,
1730 gimple_transaction_label_over (old_trans_stmt));
1732 break;
1734 default:
1735 gcc_unreachable ();
1738 else
1740 if (gimple_assign_copy_p (stmt)
1741 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1742 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1744 /* Here we handle statements that are not completely rewritten.
1745 First we detect some inlining-induced bogosities for
1746 discarding. */
1748 /* Some assignments VAR = VAR; don't generate any rtl code
1749 and thus don't count as variable modification. Avoid
1750 keeping bogosities like 0 = 0. */
1751 tree decl = gimple_assign_lhs (stmt), value;
1752 tree *n;
1754 n = id->decl_map->get (decl);
1755 if (n)
1757 value = *n;
1758 STRIP_TYPE_NOPS (value);
1759 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1760 return NULL;
1764 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1765 in a block that we aren't copying during tree_function_versioning,
1766 just drop the clobber stmt. */
1767 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1769 tree lhs = gimple_assign_lhs (stmt);
1770 if (TREE_CODE (lhs) == MEM_REF
1771 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1773 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1774 if (gimple_bb (def_stmt)
1775 && !bitmap_bit_p (id->blocks_to_copy,
1776 gimple_bb (def_stmt)->index))
1777 return NULL;
1781       /* We do not allow CLOBBERs of handled components.  In case the
1782 	 returned value is stored via such a handled component, remove
1783 	 the clobber so the stmt verifier is happy.  */
1784 if (gimple_clobber_p (stmt)
1785 && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1787 tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1788 if (!DECL_P (remapped)
1789 && TREE_CODE (remapped) != MEM_REF)
1790 return NULL;
1793 if (gimple_debug_bind_p (stmt))
1795 gdebug *copy
1796 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1797 gimple_debug_bind_get_value (stmt),
1798 stmt);
1799 if (id->reset_location)
1800 gimple_set_location (copy, input_location);
1801 id->debug_stmts.safe_push (copy);
1802 gimple_seq_add_stmt (&stmts, copy);
1803 return stmts;
1805 if (gimple_debug_source_bind_p (stmt))
1807 gdebug *copy = gimple_build_debug_source_bind
1808 (gimple_debug_source_bind_get_var (stmt),
1809 gimple_debug_source_bind_get_value (stmt),
1810 stmt);
1811 if (id->reset_location)
1812 gimple_set_location (copy, input_location);
1813 id->debug_stmts.safe_push (copy);
1814 gimple_seq_add_stmt (&stmts, copy);
1815 return stmts;
1817 if (gimple_debug_nonbind_marker_p (stmt))
1819 /* If the inlined function has too many debug markers,
1820 don't copy them. */
1821 if (id->src_cfun->debug_marker_count
1822 > param_max_debug_marker_count)
1823 return stmts;
1825 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1826 if (id->reset_location)
1827 gimple_set_location (copy, input_location);
1828 id->debug_stmts.safe_push (copy);
1829 gimple_seq_add_stmt (&stmts, copy);
1830 return stmts;
1833 /* Create a new deep copy of the statement. */
1834 copy = gimple_copy (stmt);
1836 /* Clear flags that need revisiting. */
1837 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1839 if (gimple_call_tail_p (call_stmt))
1840 gimple_call_set_tail (call_stmt, false);
1841 if (gimple_call_from_thunk_p (call_stmt))
1842 gimple_call_set_from_thunk (call_stmt, false);
1843 if (gimple_call_internal_p (call_stmt))
1844 switch (gimple_call_internal_fn (call_stmt))
1846 case IFN_GOMP_SIMD_LANE:
1847 case IFN_GOMP_SIMD_VF:
1848 case IFN_GOMP_SIMD_LAST_LANE:
1849 case IFN_GOMP_SIMD_ORDERED_START:
1850 case IFN_GOMP_SIMD_ORDERED_END:
1851 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1852 break;
1853 default:
1854 break;
1858 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1859 RESX and EH_DISPATCH. */
1860 if (id->eh_map)
1861 switch (gimple_code (copy))
1863 case GIMPLE_CALL:
1865 tree r, fndecl = gimple_call_fndecl (copy);
1866 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1867 switch (DECL_FUNCTION_CODE (fndecl))
1869 case BUILT_IN_EH_COPY_VALUES:
1870 r = gimple_call_arg (copy, 1);
1871 r = remap_eh_region_tree_nr (r, id);
1872 gimple_call_set_arg (copy, 1, r);
1873 /* FALLTHRU */
1875 case BUILT_IN_EH_POINTER:
1876 case BUILT_IN_EH_FILTER:
1877 r = gimple_call_arg (copy, 0);
1878 r = remap_eh_region_tree_nr (r, id);
1879 gimple_call_set_arg (copy, 0, r);
1880 break;
1882 default:
1883 break;
1886 /* Reset alias info if we didn't take measures (setting DECL_PT_UID)
1887 to keep it valid across inlining. */
1888 if (!id->src_cfun->gimple_df
1889 || !id->src_cfun->gimple_df->ipa_pta)
1890 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1892 break;
1894 case GIMPLE_RESX:
1896 gresx *resx_stmt = as_a <gresx *> (copy);
1897 int r = gimple_resx_region (resx_stmt);
1898 r = remap_eh_region_nr (r, id);
1899 gimple_resx_set_region (resx_stmt, r);
1901 break;
1903 case GIMPLE_EH_DISPATCH:
1905 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1906 int r = gimple_eh_dispatch_region (eh_dispatch);
1907 r = remap_eh_region_nr (r, id);
1908 gimple_eh_dispatch_set_region (eh_dispatch, r);
1910 break;
1912 default:
1913 break;
1917 /* If STMT has a block defined, map it to the newly constructed block. */
1918 if (tree block = gimple_block (copy))
1920 tree *n;
1921 n = id->decl_map->get (block);
1922 gcc_assert (n);
1923 gimple_set_block (copy, *n);
1925 if (id->param_body_adjs)
1927 gimple_seq extra_stmts = NULL;
1928 id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts);
1929 if (!gimple_seq_empty_p (extra_stmts))
1931 memset (&wi, 0, sizeof (wi));
1932 wi.info = id;
1933 for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1934 !gsi_end_p (egsi);
1935 gsi_next (&egsi))
1936 walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1937 gimple_seq_add_seq (&stmts, extra_stmts);
1941 if (id->reset_location)
1942 gimple_set_location (copy, input_location);
1944 /* Debug statements ought to be rebuilt and not copied. */
1945 gcc_checking_assert (!is_gimple_debug (copy));
1947 /* Remap all the operands in COPY. */
1948 memset (&wi, 0, sizeof (wi));
1949 wi.info = id;
1950 if (skip_first)
1951 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1952 else
1953 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1955 /* Clear the copied virtual operands. We are not remapping them here
1956 but are going to recreate them from scratch. */
1957 if (gimple_has_mem_ops (copy))
1959 gimple_set_vdef (copy, NULL_TREE);
1960 gimple_set_vuse (copy, NULL_TREE);
1963 if (cfun->can_throw_non_call_exceptions)
1965 /* When inlining a function which does not have non-call exceptions
1966 enabled into a function that has them (which only happens with
1967 always-inline), we have to fix up stmts that cannot throw. */
1968 if (gcond *cond = dyn_cast <gcond *> (copy))
1969 if (gimple_could_trap_p (cond))
1971 gassign *cmp
1972 = gimple_build_assign (make_ssa_name (boolean_type_node),
1973 gimple_cond_code (cond),
1974 gimple_cond_lhs (cond),
1975 gimple_cond_rhs (cond));
1976 gimple_seq_add_stmt (&stmts, cmp);
1977 gimple_cond_set_code (cond, NE_EXPR);
1978 gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
1979 gimple_cond_set_rhs (cond, boolean_false_node);
1983 gimple_seq_add_stmt (&stmts, copy);
1984 return stmts;
1988 /* Copy a basic block, scaling its profile accordingly. The edges will be
1989 taken care of later. */
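/* For intuition, a hedged arithmetic sketch with made-up numbers: if the
   callee's entry block has a profile count of 1000 (DEN) and the count
   flowing into this particular inlined copy is 250 (NUM), then a callee
   block originally executed 400 times is copied with count
   400 * 250 / 1000 = 100, which is what bb->count.apply_scale (num, den)
   computes below.  */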
1991 static basic_block
1992 copy_bb (copy_body_data *id, basic_block bb,
1993 profile_count num, profile_count den)
1995 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1996 basic_block copy_basic_block;
1997 tree decl;
1998 basic_block prev;
2000 profile_count::adjust_for_ipa_scaling (&num, &den);
2002 /* Search for previous copied basic block. */
2003 prev = bb->prev_bb;
2004 while (!prev->aux)
2005 prev = prev->prev_bb;
2007 /* create_basic_block() will append every new block to
2008 basic_block_info automatically. */
2009 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
2010 copy_basic_block->count = bb->count.apply_scale (num, den);
2012 copy_gsi = gsi_start_bb (copy_basic_block);
2014 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2016 gimple_seq stmts;
2017 gimple *stmt = gsi_stmt (gsi);
2018 gimple *orig_stmt = stmt;
2019 gimple_stmt_iterator stmts_gsi;
2020 bool stmt_added = false;
2022 id->regimplify = false;
2023 stmts = remap_gimple_stmt (stmt, id);
2025 if (gimple_seq_empty_p (stmts))
2026 continue;
2028 seq_gsi = copy_gsi;
2030 for (stmts_gsi = gsi_start (stmts);
2031 !gsi_end_p (stmts_gsi); )
2033 stmt = gsi_stmt (stmts_gsi);
2035 /* Advance iterator now before stmt is moved to seq_gsi. */
2036 gsi_next (&stmts_gsi);
2038 if (gimple_nop_p (stmt))
2039 continue;
2041 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2042 orig_stmt);
2044 /* With return slot optimization we can end up with
2045 non-gimple (foo *)&this->m, fix that here. */
2046 if (is_gimple_assign (stmt)
2047 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
2048 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2050 tree new_rhs;
2051 new_rhs = force_gimple_operand_gsi (&seq_gsi,
2052 gimple_assign_rhs1 (stmt),
2053 true, NULL, false,
2054 GSI_CONTINUE_LINKING);
2055 gimple_assign_set_rhs1 (stmt, new_rhs);
2056 id->regimplify = false;
2059 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2061 if (id->regimplify)
2062 gimple_regimplify_operands (stmt, &seq_gsi);
2064 stmt_added = true;
2067 if (!stmt_added)
2068 continue;
2070 /* If copy_basic_block has been empty at the start of this iteration,
2071 call gsi_start_bb again to get at the newly added statements. */
2072 if (gsi_end_p (copy_gsi))
2073 copy_gsi = gsi_start_bb (copy_basic_block);
2074 else
2075 gsi_next (&copy_gsi);
2077 /* Process the new statement. The call to gimple_regimplify_operands
2078 possibly turned the statement into multiple statements; we
2079 need to process all of them. */
2082 tree fn;
2083 gcall *call_stmt;
2085 stmt = gsi_stmt (copy_gsi);
2086 call_stmt = dyn_cast <gcall *> (stmt);
2087 if (call_stmt
2088 && gimple_call_va_arg_pack_p (call_stmt)
2089 && id->call_stmt
2090 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2092 /* __builtin_va_arg_pack () should be replaced by
2093 all arguments corresponding to ... in the caller. */
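/* Hedged illustration with hypothetical user code (logf_ and wrap are
   made-up names, not from this file):

     extern int logf_ (const char *fmt, ...);
     extern inline __attribute__ ((__always_inline__, __gnu_inline__)) int
     wrap (const char *fmt, ...)
     {
       return logf_ (fmt, __builtin_va_arg_pack ());
     }

   When wrap ("%d %s", 1, "x") is inlined, the copied call is rebuilt
   below as logf_ ("%d %s", 1, "x"): the inlined call's own arguments
   are kept and the caller's anonymous (...) arguments are appended in
   place of __builtin_va_arg_pack ().  */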
2094 tree p;
2095 gcall *new_call;
2096 vec<tree> argarray;
2097 size_t nargs = gimple_call_num_args (id->call_stmt);
2098 size_t n;
2100 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2101 nargs--;
2103 /* Create the new array of arguments. */
2104 n = nargs + gimple_call_num_args (call_stmt);
2105 argarray.create (n);
2106 argarray.safe_grow_cleared (n, true);
2108 /* Copy all the arguments before '...' */
2109 memcpy (argarray.address (),
2110 gimple_call_arg_ptr (call_stmt, 0),
2111 gimple_call_num_args (call_stmt) * sizeof (tree));
2113 /* Append the arguments passed in '...' */
2114 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2115 gimple_call_arg_ptr (id->call_stmt, 0)
2116 + (gimple_call_num_args (id->call_stmt) - nargs),
2117 nargs * sizeof (tree));
2119 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2120 argarray);
2122 argarray.release ();
2124 /* Copy all GIMPLE_CALL flags, location and block, except
2125 GF_CALL_VA_ARG_PACK. */
2126 gimple_call_copy_flags (new_call, call_stmt);
2127 gimple_call_set_va_arg_pack (new_call, false);
2128 /* location includes block. */
2129 gimple_set_location (new_call, gimple_location (stmt));
2130 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2132 gsi_replace (&copy_gsi, new_call, false);
2133 stmt = new_call;
2135 else if (call_stmt
2136 && id->call_stmt
2137 && (decl = gimple_call_fndecl (stmt))
2138 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2140 /* __builtin_va_arg_pack_len () should be replaced by
2141 the number of anonymous arguments. */
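/* Hedged illustration, reusing the hypothetical wrapper above: a guard
   such as

     if (__builtin_va_arg_pack_len () > 4)
       return -1;

   inside the always_inline wrapper becomes, once wrap ("%d %s", 1, "x")
   is inlined, a test of the constant 2, because the builtin call's
   result is replaced below by the number of anonymous arguments found
   on the caller's call statement.  */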
2142 size_t nargs = gimple_call_num_args (id->call_stmt);
2143 tree count, p;
2144 gimple *new_stmt;
2146 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2147 nargs--;
2149 if (!gimple_call_lhs (stmt))
2151 /* Drop unused calls. */
2152 gsi_remove (&copy_gsi, false);
2153 continue;
2155 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2157 count = build_int_cst (integer_type_node, nargs);
2158 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2159 gsi_replace (&copy_gsi, new_stmt, false);
2160 stmt = new_stmt;
2162 else if (nargs != 0)
2164 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2165 count = build_int_cst (integer_type_node, nargs);
2166 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2167 PLUS_EXPR, newlhs, count);
2168 gimple_call_set_lhs (stmt, newlhs);
2169 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2172 else if (call_stmt
2173 && id->call_stmt
2174 && gimple_call_internal_p (stmt)
2175 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2177 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2178 gsi_remove (&copy_gsi, false);
2179 continue;
2182 /* Statements produced by inlining can be unfolded, especially
2183 when we constant propagated some operands. We can't fold
2184 them right now for two reasons:
2185 1) folding requires SSA_NAME_DEF_STMTs to be correct
2186 2) we can't change function calls to builtins.
2187 So we just mark the statement for later folding. We mark
2188 all new statements, instead of just the statements that changed
2189 by some nontrivial substitution, so even statements made
2190 foldable indirectly are updated. If this turns out to be
2191 expensive, copy_body can be told to watch for nontrivial
2192 changes. */
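/* For example (hedged, hypothetical names): if inlining substituted the
   constant 8 for parameter n_3, a copied statement tmp_5 = n_3 * 4 now
   reads tmp_5 = 8 * 4 and could fold to 32; rather than folding it on
   the spot, it is queued in statements_to_fold and folded once the
   copied body is fully wired up.  */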
2193 if (id->statements_to_fold)
2194 id->statements_to_fold->add (stmt);
2196 /* We're duplicating a CALL_EXPR. Find any corresponding
2197 callgraph edges and update or duplicate them. */
2198 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2200 struct cgraph_edge *edge;
2202 switch (id->transform_call_graph_edges)
2204 case CB_CGE_DUPLICATE:
2205 edge = id->src_node->get_edge (orig_stmt);
2206 if (edge)
2208 struct cgraph_edge *old_edge = edge;
2210 /* A speculative call consists of multiple
2211 edges - an indirect edge and one or more direct edges.
2212 Duplicate the whole thing and distribute frequencies
2213 accordingly. */
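/* Worked example with made-up counts (a hedged sketch): one direct
   speculative target with count 90 plus an indirect remainder of 10
   gives probabilities 90/100 and 10/100; if the copied block's scaled
   count is 40, the cloned direct edge gets 40 * 90/100 = 36 and the
   cloned indirect edge 40 * 10/100 = 4, matching the probability_in /
   apply_probability calls below.  */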
2214 if (edge->speculative)
2216 int n = 0;
2217 profile_count direct_cnt
2218 = profile_count::zero ();
2220 /* First figure out the distribution of counts
2221 so we can re-scale BB profile accordingly. */
2222 for (cgraph_edge *e = old_edge; e;
2223 e = e->next_speculative_call_target ())
2224 direct_cnt = direct_cnt + e->count;
2226 cgraph_edge *indirect
2227 = old_edge->speculative_call_indirect_edge ();
2228 profile_count indir_cnt = indirect->count;
2230 /* Next iterate over all direct edges, clone each one and its
2231 corresponding reference, and update the profile. */
2232 for (cgraph_edge *e = old_edge;
2234 e = e->next_speculative_call_target ())
2236 profile_count cnt = e->count;
2238 id->dst_node->clone_reference
2239 (e->speculative_call_target_ref (), stmt);
2240 edge = e->clone (id->dst_node, call_stmt,
2241 gimple_uid (stmt), num, den,
2242 true);
2243 profile_probability prob
2244 = cnt.probability_in (direct_cnt
2245 + indir_cnt);
2246 edge->count
2247 = copy_basic_block->count.apply_probability
2248 (prob);
2249 n++;
2251 gcc_checking_assert
2252 (indirect->num_speculative_call_targets_p ()
2253 == n);
2255 /* Duplicate the indirect edge after all direct edges have been
2256 cloned. */
2257 indirect = indirect->clone (id->dst_node, call_stmt,
2258 gimple_uid (stmt),
2259 num, den,
2260 true);
2262 profile_probability prob
2263 = indir_cnt.probability_in (direct_cnt
2264 + indir_cnt);
2265 indirect->count
2266 = copy_basic_block->count.apply_probability (prob);
2268 else
2270 edge = edge->clone (id->dst_node, call_stmt,
2271 gimple_uid (stmt),
2272 num, den,
2273 true);
2274 edge->count = copy_basic_block->count;
2277 break;
2279 case CB_CGE_MOVE_CLONES:
2280 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2281 call_stmt);
2282 edge = id->dst_node->get_edge (stmt);
2283 break;
2285 case CB_CGE_MOVE:
2286 edge = id->dst_node->get_edge (orig_stmt);
2287 if (edge)
2288 edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2289 break;
2291 default:
2292 gcc_unreachable ();
2295 /* Constant propagation on arguments done during inlining
2296 may create a new direct call. Produce an edge for it. */
2297 if ((!edge
2298 || (edge->indirect_inlining_edge
2299 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2300 && id->dst_node->definition
2301 && (fn = gimple_call_fndecl (stmt)) != NULL)
2303 struct cgraph_node *dest = cgraph_node::get_create (fn);
2305 /* We have a missing edge in the callgraph. This can happen
2306 when previous inlining turned an indirect call into a
2307 direct call by constant propagating arguments, or when we are
2308 producing a dead clone (for further cloning). In all
2309 other cases we hit a bug (incorrect node sharing is the
2310 most common reason for missing edges). */
2311 gcc_assert (!dest->definition
2312 || dest->address_taken
2313 || !id->src_node->definition
2314 || !id->dst_node->definition);
2315 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2316 id->dst_node->create_edge_including_clones
2317 (dest, orig_stmt, call_stmt, bb->count,
2318 CIF_ORIGINALLY_INDIRECT_CALL);
2319 else
2320 id->dst_node->create_edge (dest, call_stmt,
2321 bb->count)->inline_failed
2322 = CIF_ORIGINALLY_INDIRECT_CALL;
2323 if (dump_file)
2325 fprintf (dump_file, "Created new direct edge to %s\n",
2326 dest->dump_name ());
2330 notice_special_calls (as_a <gcall *> (stmt));
2333 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2334 id->eh_map, id->eh_lp_nr);
2336 gsi_next (&copy_gsi);
2338 while (!gsi_end_p (copy_gsi));
2340 copy_gsi = gsi_last_bb (copy_basic_block);
2343 return copy_basic_block;
2346 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2347 form is quite easy, since the dominator relationship for the old basic
2348 blocks does not change.
2350 There is however an exception where inlining might change the dominator
2351 relation across EH edges from basic blocks within the inlined function
2352 to landing pads in the function we inline into.
2354 The function fills in PHI_RESULTs of such PHI nodes if they refer
2355 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2356 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2357 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2358 set, and this means that there will be no overlapping live ranges
2359 for the underlying symbol.
2361 This might change in the future if we allow redirecting of EH edges and
2362 we might then want to change the way we build the CFG pre-inlining to
2363 include all the possible edges. */
2364 static void
2365 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2366 bool can_throw, bool nonlocal_goto)
2368 edge e;
2369 edge_iterator ei;
2371 FOR_EACH_EDGE (e, ei, bb->succs)
2372 if (!e->dest->aux
2373 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2375 gphi *phi;
2376 gphi_iterator si;
2378 if (!nonlocal_goto)
2379 gcc_assert (e->flags & EDGE_EH);
2381 if (!can_throw)
2382 gcc_assert (!(e->flags & EDGE_EH));
2384 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2386 edge re;
2388 phi = si.phi ();
2390 /* For abnormal goto/call edges the receiver can be the
2391 ENTRY_BLOCK. Do not assert this cannot happen. */
2393 gcc_assert ((e->flags & EDGE_EH)
2394 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2396 re = find_edge (ret_bb, e->dest);
2397 gcc_checking_assert (re);
2398 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2399 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2401 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2402 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2407 /* Insert clobbers for automatic variables of the inlined ID->src_fn
2408 function at the start of basic block ID->eh_landing_pad_dest. */
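/* Hedged illustration (hypothetical user code; may_throw and use are
   placeholders):

     void callee (void) { char buf[64]; may_throw (); use (buf); }

   After inlining callee into a caller whose try/catch region provides
   the landing pad, a buf = {CLOBBER} assignment is inserted at the
   start of that pad, so later passes can treat buf as dead on the
   exceptional path (e.g. for stack slot sharing).  Only variables that
   are actually live across an incoming EH edge receive such a clobber,
   as computed below.  */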
2410 static void
2411 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2413 tree var;
2414 basic_block bb = id->eh_landing_pad_dest;
2415 live_vars_map *vars = NULL;
2416 unsigned int cnt = 0;
2417 unsigned int i;
2418 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2419 if (VAR_P (var)
2420 && !DECL_HARD_REGISTER (var)
2421 && !TREE_THIS_VOLATILE (var)
2422 && !DECL_HAS_VALUE_EXPR_P (var)
2423 && !is_gimple_reg (var)
2424 && auto_var_in_fn_p (var, id->src_fn)
2425 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2427 tree *t = id->decl_map->get (var);
2428 if (!t)
2429 continue;
2430 tree new_var = *t;
2431 if (VAR_P (new_var)
2432 && !DECL_HARD_REGISTER (new_var)
2433 && !TREE_THIS_VOLATILE (new_var)
2434 && !DECL_HAS_VALUE_EXPR_P (new_var)
2435 && !is_gimple_reg (new_var)
2436 && auto_var_in_fn_p (new_var, id->dst_fn))
2438 if (vars == NULL)
2439 vars = new live_vars_map;
2440 vars->put (DECL_UID (var), cnt++);
2443 if (vars == NULL)
2444 return;
2446 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2447 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2448 if (VAR_P (var))
2450 edge e;
2451 edge_iterator ei;
2452 bool needed = false;
2453 unsigned int *v = vars->get (DECL_UID (var));
2454 if (v == NULL)
2455 continue;
2456 FOR_EACH_EDGE (e, ei, bb->preds)
2457 if ((e->flags & EDGE_EH) != 0
2458 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2460 basic_block src_bb = (basic_block) e->src->aux;
2462 if (bitmap_bit_p (&live[src_bb->index], *v))
2464 needed = true;
2465 break;
2468 if (needed)
2470 tree new_var = *id->decl_map->get (var);
2471 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2472 tree clobber = build_clobber (TREE_TYPE (new_var));
2473 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2474 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2477 destroy_live_vars (live);
2478 delete vars;
2481 /* Copy edges from BB into its copy constructed earlier, scaling the
2482 profile accordingly. Assume the aux
2483 pointers point to the copies of each BB. Return true if any
2484 debug stmts are left after a statement that must end the basic block. */
2486 static bool
2487 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2488 basic_block ret_bb, basic_block abnormal_goto_dest,
2489 copy_body_data *id)
2491 basic_block new_bb = (basic_block) bb->aux;
2492 edge_iterator ei;
2493 edge old_edge;
2494 gimple_stmt_iterator si;
2495 bool need_debug_cleanup = false;
2497 /* Use the indices from the original blocks to create edges for the
2498 new ones. */
2499 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2500 if (!(old_edge->flags & EDGE_EH))
2502 edge new_edge;
2503 int flags = old_edge->flags;
2504 location_t locus = old_edge->goto_locus;
2506 /* Return edges do get a FALLTHRU flag when they get inlined. */
2507 if (old_edge->dest->index == EXIT_BLOCK
2508 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2509 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2510 flags |= EDGE_FALLTHRU;
2512 new_edge
2513 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2514 new_edge->probability = old_edge->probability;
2515 if (!id->reset_location)
2516 new_edge->goto_locus = remap_location (locus, id);
2519 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2520 return false;
2522 /* When doing function splitting, we must decrease the count of the return
2523 block which was previously reachable by blocks we did not copy. */
2524 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2525 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2526 if (old_edge->src->index != ENTRY_BLOCK
2527 && !old_edge->src->aux)
2528 new_bb->count -= old_edge->count ().apply_scale (num, den);
2530 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2532 gimple *copy_stmt;
2533 bool can_throw, nonlocal_goto;
2535 copy_stmt = gsi_stmt (si);
2536 if (!is_gimple_debug (copy_stmt))
2537 update_stmt (copy_stmt);
2539 /* Do this before the possible split_block. */
2540 gsi_next (&si);
2542 /* If this tree could throw an exception, there are two
2543 cases where we need to add abnormal edge(s): the
2544 tree wasn't in a region and there is a "current
2545 region" in the caller; or the original tree had
2546 EH edges. In both cases split the block after the tree,
2547 and add abnormal edge(s) as needed; we need both
2548 those from the callee and the caller.
2549 We check whether the copy can throw, because the const
2550 propagation can change an INDIRECT_REF which throws
2551 into a COMPONENT_REF which doesn't. If the copy
2552 can throw, the original could also throw. */
2553 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2554 nonlocal_goto
2555 = (stmt_can_make_abnormal_goto (copy_stmt)
2556 && !computed_goto_p (copy_stmt));
2558 if (can_throw || nonlocal_goto)
2560 if (!gsi_end_p (si))
2562 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2563 gsi_next (&si);
2564 if (gsi_end_p (si))
2565 need_debug_cleanup = true;
2567 if (!gsi_end_p (si))
2568 /* Note that bb's predecessor edges aren't necessarily
2569 right at this point; split_block doesn't care. */
2571 edge e = split_block (new_bb, copy_stmt);
2573 new_bb = e->dest;
2574 new_bb->aux = e->src->aux;
2575 si = gsi_start_bb (new_bb);
2579 bool update_probs = false;
2581 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2583 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2584 update_probs = true;
2586 else if (can_throw)
2588 make_eh_edges (copy_stmt);
2589 update_probs = true;
2592 /* EH edges may not match old edges. Copy as much as possible. */
2593 if (update_probs)
2595 edge e;
2596 edge_iterator ei;
2597 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2599 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2600 if ((old_edge->flags & EDGE_EH)
2601 && (e = find_edge (copy_stmt_bb,
2602 (basic_block) old_edge->dest->aux))
2603 && (e->flags & EDGE_EH))
2604 e->probability = old_edge->probability;
2606 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2607 if (e->flags & EDGE_EH)
2609 if (!e->probability.initialized_p ())
2610 e->probability = profile_probability::never ();
2611 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2613 if (id->eh_landing_pad_dest == NULL)
2614 id->eh_landing_pad_dest = e->dest;
2615 else
2616 gcc_assert (id->eh_landing_pad_dest == e->dest);
2622 /* If the call we inline cannot make an abnormal goto, do not add
2623 additional abnormal edges but only retain those already present
2624 in the original function body. */
2625 if (abnormal_goto_dest == NULL)
2626 nonlocal_goto = false;
2627 if (nonlocal_goto)
2629 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2631 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2632 nonlocal_goto = false;
2633 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2634 in OpenMP regions which aren't allowed to be left abnormally.
2635 So, no need to add abnormal edge in that case. */
2636 else if (is_gimple_call (copy_stmt)
2637 && gimple_call_internal_p (copy_stmt)
2638 && (gimple_call_internal_fn (copy_stmt)
2639 == IFN_ABNORMAL_DISPATCHER)
2640 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2641 nonlocal_goto = false;
2642 else
2643 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2644 EDGE_ABNORMAL);
2647 if ((can_throw || nonlocal_goto)
2648 && gimple_in_ssa_p (cfun))
2649 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2650 can_throw, nonlocal_goto);
2652 return need_debug_cleanup;
2655 /* Copy the PHIs. All blocks and edges have been copied, some blocks
2656 were possibly split and new outgoing EH edges inserted.
2657 BB points to the block of the original function and the AUX pointers link
2658 the original and newly copied blocks. */
2660 static void
2661 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2663 basic_block const new_bb = (basic_block) bb->aux;
2664 edge_iterator ei;
2665 gphi *phi;
2666 gphi_iterator si;
2667 edge new_edge;
2668 bool inserted = false;
2670 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2672 tree res, new_res;
2673 gphi *new_phi;
2675 phi = si.phi ();
2676 res = PHI_RESULT (phi);
2677 new_res = res;
2678 if (!virtual_operand_p (res))
2680 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2681 if (EDGE_COUNT (new_bb->preds) == 0)
2683 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2684 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2686 else
2688 new_phi = create_phi_node (new_res, new_bb);
2689 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2691 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2692 bb);
2693 tree arg;
2694 tree new_arg;
2695 edge_iterator ei2;
2696 location_t locus;
2698 /* When doing partial cloning, we allow PHIs on the entry
2699 block as long as all the arguments are the same.
2700 Find any input edge to see which argument to copy. */
2701 if (!old_edge)
2702 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2703 if (!old_edge->src->aux)
2704 break;
2706 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2707 new_arg = arg;
2708 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2709 gcc_assert (new_arg);
2710 /* With return slot optimization we can end up with
2711 non-gimple (foo *)&this->m, fix that here. */
2712 if (TREE_CODE (new_arg) != SSA_NAME
2713 && TREE_CODE (new_arg) != FUNCTION_DECL
2714 && !is_gimple_val (new_arg))
2716 gimple_seq stmts = NULL;
2717 new_arg = force_gimple_operand (new_arg, &stmts, true,
2718 NULL);
2719 gsi_insert_seq_on_edge (new_edge, stmts);
2720 inserted = true;
2722 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2723 if (id->reset_location)
2724 locus = input_location;
2725 else
2726 locus = remap_location (locus, id);
2727 add_phi_arg (new_phi, new_arg, new_edge, locus);
2733 /* Commit the delayed edge insertions. */
2734 if (inserted)
2735 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2736 gsi_commit_one_edge_insert (new_edge, NULL);
2740 /* Wrapper for remap_decl so it can be used as a callback. */
2742 static tree
2743 remap_decl_1 (tree decl, void *data)
2745 return remap_decl (decl, (copy_body_data *) data);
2748 /* Build struct function and associated data structures for the new clone
2749 NEW_FNDECL to be built. CALLEE_FNDECL is the original. This function
2750 changes cfun to the function of new_fndecl (and current_function_decl too). */
2752 static void
2753 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2755 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2757 if (!DECL_ARGUMENTS (new_fndecl))
2758 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2759 if (!DECL_RESULT (new_fndecl))
2760 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2762 /* Register specific tree functions. */
2763 gimple_register_cfg_hooks ();
2765 /* Get clean struct function. */
2766 push_struct_function (new_fndecl);
2768 /* We will rebuild these, so just sanity check that they are empty. */
2769 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2770 gcc_assert (cfun->local_decls == NULL);
2771 gcc_assert (cfun->cfg == NULL);
2772 gcc_assert (cfun->decl == new_fndecl);
2774 /* Copy items we preserve during cloning. */
2775 cfun->static_chain_decl = src_cfun->static_chain_decl;
2776 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2777 cfun->function_end_locus = src_cfun->function_end_locus;
2778 cfun->curr_properties = src_cfun->curr_properties;
2779 cfun->last_verified = src_cfun->last_verified;
2780 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2781 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2782 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2783 cfun->calls_eh_return = src_cfun->calls_eh_return;
2784 cfun->stdarg = src_cfun->stdarg;
2785 cfun->after_inlining = src_cfun->after_inlining;
2786 cfun->can_throw_non_call_exceptions
2787 = src_cfun->can_throw_non_call_exceptions;
2788 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2789 cfun->returns_struct = src_cfun->returns_struct;
2790 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2792 init_empty_tree_cfg ();
2794 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2796 profile_count num = count;
2797 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2798 profile_count::adjust_for_ipa_scaling (&num, &den);
2800 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2801 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2802 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2803 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2804 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2805 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2806 if (src_cfun->eh)
2807 init_eh_for_function ();
2809 if (src_cfun->gimple_df)
2811 init_tree_ssa (cfun);
2812 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2813 if (cfun->gimple_df->in_ssa_p)
2814 init_ssa_operands (cfun);
2818 /* Helper function for copy_cfg_body. Move debug stmts from the end
2819 of NEW_BB to the beginning of successor basic blocks when needed. If the
2820 successor has multiple predecessors, reset them; otherwise keep
2821 their value. */
2823 static void
2824 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2826 edge e;
2827 edge_iterator ei;
2828 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2830 if (gsi_end_p (si)
2831 || gsi_one_before_end_p (si)
2832 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2833 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2834 return;
2836 FOR_EACH_EDGE (e, ei, new_bb->succs)
2838 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2839 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2840 while (is_gimple_debug (gsi_stmt (ssi)))
2842 gimple *stmt = gsi_stmt (ssi);
2843 gdebug *new_stmt;
2844 tree var;
2845 tree value;
2847 /* For the last edge move the debug stmts instead of copying
2848 them. */
2849 if (ei_one_before_end_p (ei))
2851 si = ssi;
2852 gsi_prev (&ssi);
2853 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2855 gimple_debug_bind_reset_value (stmt);
2856 gimple_set_location (stmt, UNKNOWN_LOCATION);
2858 gsi_remove (&si, false);
2859 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2860 continue;
2863 if (gimple_debug_bind_p (stmt))
2865 var = gimple_debug_bind_get_var (stmt);
2866 if (single_pred_p (e->dest))
2868 value = gimple_debug_bind_get_value (stmt);
2869 value = unshare_expr (value);
2870 new_stmt = gimple_build_debug_bind (var, value, stmt);
2872 else
2873 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2875 else if (gimple_debug_source_bind_p (stmt))
2877 var = gimple_debug_source_bind_get_var (stmt);
2878 value = gimple_debug_source_bind_get_value (stmt);
2879 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2881 else if (gimple_debug_nonbind_marker_p (stmt))
2882 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2883 else
2884 gcc_unreachable ();
2885 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2886 id->debug_stmts.safe_push (new_stmt);
2887 gsi_prev (&ssi);
2892 /* Make a copy of the sub-loops of SRC_PARENT and place them
2893 as siblings of DEST_PARENT. */
2895 static void
2896 copy_loops (copy_body_data *id,
2897 class loop *dest_parent, class loop *src_parent)
2899 class loop *src_loop = src_parent->inner;
2900 while (src_loop)
2902 if (!id->blocks_to_copy
2903 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2905 class loop *dest_loop = alloc_loop ();
2907 /* Assign the new loop its header and latch and associate
2908 those with the new loop. */
2909 dest_loop->header = (basic_block)src_loop->header->aux;
2910 dest_loop->header->loop_father = dest_loop;
2911 if (src_loop->latch != NULL)
2913 dest_loop->latch = (basic_block)src_loop->latch->aux;
2914 dest_loop->latch->loop_father = dest_loop;
2917 /* Copy loop meta-data. */
2918 copy_loop_info (src_loop, dest_loop);
2919 if (dest_loop->unroll)
2920 cfun->has_unroll = true;
2921 if (dest_loop->force_vectorize)
2922 cfun->has_force_vectorize_loops = true;
2923 if (id->src_cfun->last_clique != 0)
2924 dest_loop->owned_clique
2925 = remap_dependence_clique (id,
2926 src_loop->owned_clique
2927 ? src_loop->owned_clique : 1);
2929 /* Finally place it into the loop array and the loop tree. */
2930 place_new_loop (cfun, dest_loop);
2931 flow_loop_tree_node_add (dest_parent, dest_loop);
2933 if (src_loop->simduid)
2935 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2936 cfun->has_simduid_loops = true;
2939 /* Recurse. */
2940 copy_loops (id, dest_loop, src_loop);
2942 src_loop = src_loop->next;
2946 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2948 void
2949 redirect_all_calls (copy_body_data * id, basic_block bb)
2951 gimple_stmt_iterator si;
2952 gimple *last = last_stmt (bb);
2953 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2955 gimple *stmt = gsi_stmt (si);
2956 if (is_gimple_call (stmt))
2958 tree old_lhs = gimple_call_lhs (stmt);
2959 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2960 if (edge)
2962 gimple *new_stmt
2963 = cgraph_edge::redirect_call_stmt_to_callee (edge);
2964 /* If the IPA-SRA transformation, run as part of edge redirection,
2965 removed the LHS because it is unused, save it to
2966 killed_new_ssa_names so that we can prune it from debug
2967 statements. */
2968 if (old_lhs
2969 && TREE_CODE (old_lhs) == SSA_NAME
2970 && !gimple_call_lhs (new_stmt))
2972 if (!id->killed_new_ssa_names)
2973 id->killed_new_ssa_names = new hash_set<tree> (16);
2974 id->killed_new_ssa_names->add (old_lhs);
2977 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2978 gimple_purge_dead_eh_edges (bb);
2984 /* Make a copy of the body of FN so that it can be inserted inline in
2985 another function. Walks FN via its CFG and returns the new fndecl. */
2987 static tree
2988 copy_cfg_body (copy_body_data * id,
2989 basic_block entry_block_map, basic_block exit_block_map,
2990 basic_block new_entry)
2992 tree callee_fndecl = id->src_fn;
2993 /* Original cfun for the callee, doesn't change. */
2994 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2995 struct function *cfun_to_copy;
2996 basic_block bb;
2997 tree new_fndecl = NULL;
2998 bool need_debug_cleanup = false;
2999 int last;
3000 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
3001 profile_count num = entry_block_map->count;
3003 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3005 /* Register specific tree functions. */
3006 gimple_register_cfg_hooks ();
3008 /* If we are inlining just a region of the function, make sure to connect
3009 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
3010 part of a loop, we must compute the frequency and probability of
3011 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
3012 probabilities of the edges incoming from the nonduplicated region. */
3013 if (new_entry)
3015 edge e;
3016 edge_iterator ei;
3017 den = profile_count::zero ();
3019 FOR_EACH_EDGE (e, ei, new_entry->preds)
3020 if (!e->src->aux)
3021 den += e->count ();
3022 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
3025 profile_count::adjust_for_ipa_scaling (&num, &den);
3027 /* Must have a CFG here at this point. */
3028 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
3029 (DECL_STRUCT_FUNCTION (callee_fndecl)));
3032 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
3033 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3034 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3035 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3037 /* Duplicate any exception-handling regions. */
3038 if (cfun->eh)
3039 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3040 remap_decl_1, id);
3042 /* Use aux pointers to map the original blocks to their copies. */
3043 FOR_EACH_BB_FN (bb, cfun_to_copy)
3044 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3046 basic_block new_bb = copy_bb (id, bb, num, den);
3047 bb->aux = new_bb;
3048 new_bb->aux = bb;
3049 new_bb->loop_father = entry_block_map->loop_father;
3052 last = last_basic_block_for_fn (cfun);
3054 /* Now that we've duplicated the blocks, duplicate their edges. */
3055 basic_block abnormal_goto_dest = NULL;
3056 if (id->call_stmt
3057 && stmt_can_make_abnormal_goto (id->call_stmt))
3059 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3061 bb = gimple_bb (id->call_stmt);
3062 gsi_next (&gsi);
3063 if (gsi_end_p (gsi))
3064 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3066 FOR_ALL_BB_FN (bb, cfun_to_copy)
3067 if (!id->blocks_to_copy
3068 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3069 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3070 abnormal_goto_dest, id);
3072 if (id->eh_landing_pad_dest)
3074 add_clobbers_to_eh_landing_pad (id);
3075 id->eh_landing_pad_dest = NULL;
3078 if (new_entry)
3080 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3081 EDGE_FALLTHRU);
3082 e->probability = profile_probability::always ();
3085 /* Duplicate the loop tree, if available and wanted. */
3086 if (loops_for_fn (src_cfun) != NULL
3087 && current_loops != NULL)
3089 copy_loops (id, entry_block_map->loop_father,
3090 get_loop (src_cfun, 0));
3091 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
3092 loops_state_set (LOOPS_NEED_FIXUP);
3095 /* If the loop tree in the source function needed fixup, mark the
3096 destination loop tree for fixup, too. */
3097 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3098 loops_state_set (LOOPS_NEED_FIXUP);
3100 if (gimple_in_ssa_p (cfun))
3101 FOR_ALL_BB_FN (bb, cfun_to_copy)
3102 if (!id->blocks_to_copy
3103 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3104 copy_phis_for_bb (bb, id);
3106 FOR_ALL_BB_FN (bb, cfun_to_copy)
3107 if (bb->aux)
3109 if (need_debug_cleanup
3110 && bb->index != ENTRY_BLOCK
3111 && bb->index != EXIT_BLOCK)
3112 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3113 /* Update call edge destinations. This cannot be done before loop
3114 info is updated, because we may split basic blocks. */
3115 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3116 && bb->index != ENTRY_BLOCK
3117 && bb->index != EXIT_BLOCK)
3118 redirect_all_calls (id, (basic_block)bb->aux);
3119 ((basic_block)bb->aux)->aux = NULL;
3120 bb->aux = NULL;
3123 /* Zero out the AUX fields of blocks newly created during EH edge
3124 insertion. */
3125 for (; last < last_basic_block_for_fn (cfun); last++)
3127 if (need_debug_cleanup)
3128 maybe_move_debug_stmts_to_successors (id,
3129 BASIC_BLOCK_FOR_FN (cfun, last));
3130 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3131 /* Update call edge destinations. This cannot be done before loop
3132 info is updated, because we may split basic blocks. */
3133 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3134 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3136 entry_block_map->aux = NULL;
3137 exit_block_map->aux = NULL;
3139 if (id->eh_map)
3141 delete id->eh_map;
3142 id->eh_map = NULL;
3144 if (id->dependence_map)
3146 delete id->dependence_map;
3147 id->dependence_map = NULL;
3150 return new_fndecl;
3153 /* Copy the debug STMT using ID. We deal with these statements in a
3154 special way: if any variable in their VALUE expression wasn't
3155 remapped yet, we won't remap it, because that would get decl uids
3156 out of sync, causing codegen differences between -g and -g0. If
3157 this arises, we drop the VALUE expression altogether. */
3159 static void
3160 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3162 tree t, *n;
3163 struct walk_stmt_info wi;
3165 if (tree block = gimple_block (stmt))
3167 n = id->decl_map->get (block);
3168 gimple_set_block (stmt, n ? *n : id->block);
3171 if (gimple_debug_nonbind_marker_p (stmt))
3172 return;
3174 /* Remap all the operands in COPY. */
3175 memset (&wi, 0, sizeof (wi));
3176 wi.info = id;
3178 processing_debug_stmt = 1;
3180 if (gimple_debug_source_bind_p (stmt))
3181 t = gimple_debug_source_bind_get_var (stmt);
3182 else if (gimple_debug_bind_p (stmt))
3183 t = gimple_debug_bind_get_var (stmt);
3184 else
3185 gcc_unreachable ();
3187 if (TREE_CODE (t) == PARM_DECL && id->debug_map
3188 && (n = id->debug_map->get (t)))
3190 gcc_assert (VAR_P (*n));
3191 t = *n;
3193 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3194 /* T is a non-localized variable. */;
3195 else
3196 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3198 if (gimple_debug_bind_p (stmt))
3200 gimple_debug_bind_set_var (stmt, t);
3202 if (gimple_debug_bind_has_value_p (stmt))
3203 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3204 remap_gimple_op_r, &wi, NULL);
3206 /* Punt if any decl couldn't be remapped. */
3207 if (processing_debug_stmt < 0)
3208 gimple_debug_bind_reset_value (stmt);
3210 else if (gimple_debug_source_bind_p (stmt))
3212 gimple_debug_source_bind_set_var (stmt, t);
3213 /* When inlining, if the source bind refers to one of the optimized-away
3214 parameters, change the source bind into a normal debug bind
3215 referring to the corresponding DEBUG_EXPR_DECL that should have
3216 been bound before the call stmt. */
3217 t = gimple_debug_source_bind_get_value (stmt);
3218 if (t != NULL_TREE
3219 && TREE_CODE (t) == PARM_DECL
3220 && id->call_stmt)
3222 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3223 unsigned int i;
3224 if (debug_args != NULL)
3226 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3227 if ((**debug_args)[i] == DECL_ORIGIN (t)
3228 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3230 t = (**debug_args)[i + 1];
3231 stmt->subcode = GIMPLE_DEBUG_BIND;
3232 gimple_debug_bind_set_value (stmt, t);
3233 break;
3237 if (gimple_debug_source_bind_p (stmt))
3238 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3239 remap_gimple_op_r, &wi, NULL);
3242 processing_debug_stmt = 0;
3244 update_stmt (stmt);
3247 /* Process deferred debug stmts. In order to give values better odds
3248 of being successfully remapped, we delay the processing of debug
3249 stmts until all other stmts that might require remapping are
3250 processed. */
3252 static void
3253 copy_debug_stmts (copy_body_data *id)
3255 size_t i;
3256 gdebug *stmt;
3258 if (!id->debug_stmts.exists ())
3259 return;
3261 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3262 copy_debug_stmt (stmt, id);
3264 id->debug_stmts.release ();
3267 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3268 another function. */
3270 static tree
3271 copy_tree_body (copy_body_data *id)
3273 tree fndecl = id->src_fn;
3274 tree body = DECL_SAVED_TREE (fndecl);
3276 walk_tree (&body, copy_tree_body_r, id, NULL);
3278 return body;
3281 /* Make a copy of the body of FN so that it can be inserted inline in
3282 another function. */
3284 static tree
3285 copy_body (copy_body_data *id,
3286 basic_block entry_block_map, basic_block exit_block_map,
3287 basic_block new_entry)
3289 tree fndecl = id->src_fn;
3290 tree body;
3292 /* If this body has a CFG, walk CFG and copy. */
3293 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3294 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3295 new_entry);
3296 copy_debug_stmts (id);
3297 delete id->killed_new_ssa_names;
3298 id->killed_new_ssa_names = NULL;
3300 return body;
3303 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3304 defined in function FN, or of a data member thereof. */
3306 static bool
3307 self_inlining_addr_expr (tree value, tree fn)
3309 tree var;
3311 if (TREE_CODE (value) != ADDR_EXPR)
3312 return false;
3314 var = get_base_address (TREE_OPERAND (value, 0));
3316 return var && auto_var_in_fn_p (var, fn);
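/* For instance (a hedged sketch): if f contains the recursive call
   f (&local), where local is an automatic variable of f itself, the
   actual argument is an ADDR_EXPR of a variable of the function being
   inlined, so this predicate returns true and setup_one_parameter
   below declines to propagate the address as if it were invariant.  */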
3319 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3320 lexical block and line number information from base_stmt, if given,
3321 or from the last stmt of the block otherwise. */
3323 static gimple *
3324 insert_init_debug_bind (copy_body_data *id,
3325 basic_block bb, tree var, tree value,
3326 gimple *base_stmt)
3328 gimple *note;
3329 gimple_stmt_iterator gsi;
3330 tree tracked_var;
3332 if (!gimple_in_ssa_p (id->src_cfun))
3333 return NULL;
3335 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3336 return NULL;
3338 tracked_var = target_for_debug_bind (var);
3339 if (!tracked_var)
3340 return NULL;
3342 if (bb)
3344 gsi = gsi_last_bb (bb);
3345 if (!base_stmt && !gsi_end_p (gsi))
3346 base_stmt = gsi_stmt (gsi);
3349 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3351 if (bb)
3353 if (!gsi_end_p (gsi))
3354 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3355 else
3356 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3359 return note;
3362 static void
3363 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3365 /* If VAR represents a zero-sized variable, it's possible that the
3366 assignment statement may result in no gimple statements. */
3367 if (init_stmt)
3369 gimple_stmt_iterator si = gsi_last_bb (bb);
3371 /* We can end up with init statements that store to a non-register
3372 from a rhs with a conversion. Handle that here by forcing the
3373 rhs into a temporary. gimple_regimplify_operands is not
3374 prepared to do this for us. */
3375 if (!is_gimple_debug (init_stmt)
3376 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3377 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3378 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3380 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3381 gimple_expr_type (init_stmt),
3382 gimple_assign_rhs1 (init_stmt));
3383 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3384 GSI_NEW_STMT);
3385 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3386 gimple_assign_set_rhs1 (init_stmt, rhs);
3388 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3389 if (!is_gimple_debug (init_stmt))
3391 gimple_regimplify_operands (init_stmt, &si);
3393 tree def = gimple_assign_lhs (init_stmt);
3394 insert_init_debug_bind (id, bb, def, def, init_stmt);
3399 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3400 if need be (which should only be necessary for invalid programs). Attempt
3401 to convert VALUE to TYPE and return the result if that is possible; just return
3402 a zero constant of the given type if it fails. */
3404 tree
3405 force_value_to_type (tree type, tree value)
3407 /* If we can match up types by promotion/demotion do so. */
3408 if (fold_convertible_p (type, value))
3409 return fold_convert (type, value);
3411 /* ??? For valid programs we should not end up here.
3412 Still if we end up with truly mismatched types here, fall back
3413 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3414 GIMPLE to the following passes. */
3415 if (!is_gimple_reg_type (TREE_TYPE (value))
3416 || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3417 return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3418 else
3419 return build_zero_cst (type);
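/* Hedged examples of the fallbacks above (typical LP64 assumptions):
   an int actual for a long long formal is fold-convertible and is
   simply widened; a float actual for a same-width unsigned int formal
   is not fold-convertible but matches in size, so it is reinterpreted
   with VIEW_CONVERT_EXPR; a float actual for a 64-bit integer formal
   fits neither case and degrades to build_zero_cst of the formal's
   type.  */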
3422 /* Initialize parameter P with VALUE. If needed, produce the init statement
3423 at the end of BB. When BB is NULL, we return the init statement to be
3424 output later. */
3425 static gimple *
3426 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3427 basic_block bb, tree *vars)
3429 gimple *init_stmt = NULL;
3430 tree var;
3431 tree rhs = value;
3432 tree def = (gimple_in_ssa_p (cfun)
3433 ? ssa_default_def (id->src_cfun, p) : NULL);
3435 if (value
3436 && value != error_mark_node
3437 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3438 rhs = force_value_to_type (TREE_TYPE (p), value);
3440 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3441 here since the type of this decl must be visible to the calling
3442 function. */
3443 var = copy_decl_to_var (p, id);
3445 /* Declare this new variable. */
3446 DECL_CHAIN (var) = *vars;
3447 *vars = var;
3449 /* Make gimplifier happy about this variable. */
3450 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3452 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3453 we would not need to create a new variable here at all, if it
3454 weren't for debug info. Still, we can just use the argument
3455 value. */
3456 if (TREE_READONLY (p)
3457 && !TREE_ADDRESSABLE (p)
3458 && value && !TREE_SIDE_EFFECTS (value)
3459 && !def)
3461 /* We may produce non-gimple trees by adding NOPs or introduce
3462 invalid sharing when the operand is not really constant.
3463 It is not a big deal to prohibit constant propagation here as
3464 we will constant propagate in the DOM1 pass anyway. */
3465 if (is_gimple_min_invariant (value)
3466 && useless_type_conversion_p (TREE_TYPE (p),
3467 TREE_TYPE (value))
3468 /* We have to be very careful about ADDR_EXPR. Make sure
3469 the base variable isn't a local variable of the inlined
3470 function, e.g., when doing recursive inlining, direct or
3471 mutually-recursive or whatever, which is why we don't
3472 just test whether fn == current_function_decl. */
3473 && ! self_inlining_addr_expr (value, fn))
3475 insert_decl_map (id, p, value);
3476 insert_debug_decl_map (id, p, var);
3477 return insert_init_debug_bind (id, bb, var, value, NULL);
3481 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3482 that way, when the PARM_DECL is encountered, it will be
3483 automatically replaced by the VAR_DECL. */
3484 insert_decl_map (id, p, var);
3486 /* Even if P was TREE_READONLY, the new VAR should not be.
3487 In the original code, we would have constructed a
3488 temporary, and then the function body would have never
3489 changed the value of P. However, now, we will be
3490 constructing VAR directly. The constructor body may
3491 change its value multiple times as it is being
3492 constructed. Therefore, it must not be TREE_READONLY;
3493 the back-end assumes that a TREE_READONLY variable is
3494 assigned to only once. */
3495 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3496 TREE_READONLY (var) = 0;
3498 /* If there is no setup required and we are in SSA, take the easy route
3499 replacing all SSA names representing the function parameter by the
3500 SSA name passed to the function.
3502 We need to construct a map for the variable anyway, as it might be used
3503 in different SSA names when the parameter is set in the function.
3505 Do the replacement even at -O0 for const arguments replaced by a constant.
3506 This is important for builtin_constant_p and other constructs requiring
3507 a constant argument to be visible in the inlined function body. */
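/* Hedged illustration (hypothetical user code; slow_path is a made-up
   helper):

     extern int slow_path (int);
     extern inline __attribute__ ((__always_inline__, __gnu_inline__)) int
     pick (const int n)
     {
       return __builtin_constant_p (n) ? n : slow_path (n);
     }

   Even at -O0, inlining pick (3) maps n's default definition straight
   to the constant 3, so the __builtin_constant_p test can resolve in
   the inlined body without relying on later propagation passes.  */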
3508 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3509 && (optimize
3510 || (TREE_READONLY (p)
3511 && is_gimple_min_invariant (rhs)))
3512 && (TREE_CODE (rhs) == SSA_NAME
3513 || is_gimple_min_invariant (rhs))
3514 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3516 insert_decl_map (id, def, rhs);
3517 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3520 /* If the value of the argument is never used, don't bother initializing
3521 it. */
3522 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3524 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3525 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3528 /* Initialize this VAR_DECL from the equivalent argument. Convert
3529 the argument to the proper type in case it was promoted. */
3530 if (value)
3532 if (rhs == error_mark_node)
3534 insert_decl_map (id, p, var);
3535 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3538 STRIP_USELESS_TYPE_CONVERSION (rhs);
3540 /* If we are in SSA form properly remap the default definition
3541 or assign to a dummy SSA name if the parameter is unused and
3542 we are not optimizing. */
3543 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3545 if (def)
3547 def = remap_ssa_name (def, id);
3548 init_stmt = gimple_build_assign (def, rhs);
3549 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3550 set_ssa_default_def (cfun, var, NULL);
3552 else if (!optimize)
3554 def = make_ssa_name (var);
3555 init_stmt = gimple_build_assign (def, rhs);
3558 else
3559 init_stmt = gimple_build_assign (var, rhs);
3561 if (bb && init_stmt)
3562 insert_init_stmt (id, bb, init_stmt);
3564 return init_stmt;
3567 /* Generate code to initialize the parameters of the function at the
3568 top of the stack in ID from the GIMPLE_CALL STMT. */
3570 static void
3571 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3572 tree fn, basic_block bb)
3574 tree parms;
3575 size_t i;
3576 tree p;
3577 tree vars = NULL_TREE;
3578 tree static_chain = gimple_call_chain (stmt);
3580 /* Figure out what the parameters are. */
3581 parms = DECL_ARGUMENTS (fn);
3583 /* Loop through the parameter declarations, replacing each with an
3584 equivalent VAR_DECL, appropriately initialized. */
3585 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3587 tree val;
3588 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3589 setup_one_parameter (id, p, val, fn, bb, &vars);
3591 /* After remapping parameters remap their types. This has to be done
3592 in a second loop over all parameters to appropriately remap
3593 variable sized arrays when the size is specified in a
3594 parameter following the array. */
3595 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3597 tree *varp = id->decl_map->get (p);
3598 if (varp && VAR_P (*varp))
3600 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3601 ? ssa_default_def (id->src_cfun, p) : NULL);
3602 tree var = *varp;
3603 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3604 /* Also remap the default definition if it was remapped
3605 to the default definition of the parameter replacement
3606 by the parameter setup. */
3607 if (def)
3609 tree *defp = id->decl_map->get (def);
3610 if (defp
3611 && TREE_CODE (*defp) == SSA_NAME
3612 && SSA_NAME_VAR (*defp) == var)
3613 TREE_TYPE (*defp) = TREE_TYPE (var);
3618 /* Initialize the static chain. */
3619 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3620 gcc_assert (fn != current_function_decl);
3621 if (p)
3623 /* No static chain? Seems like a bug in tree-nested.c. */
3624 gcc_assert (static_chain);
3626 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3629 declare_inline_vars (id->block, vars);
3633 /* Declare a return variable to replace the RESULT_DECL for the
3634 function we are calling. An appropriate DECL_STMT is returned.
3635 The USE_STMT is filled to contain a use of the declaration to
3636 indicate the return value of the function.
3638 RETURN_SLOT, if non-null, is the place in which to store the result. It
3639 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3640 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3642 The return value is a (possibly null) value that holds the result
3643 as seen by the caller. */
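/* Hedged illustration (hypothetical user code):

     struct big { int a[32]; };
     extern struct big make (void);
     void use_it (void) { struct big b = make (); }

   Here the caller's b reaches this function either as RETURN_SLOT
   (when the front end applied the return slot optimization) or as
   MODIFY_DEST; in both cases the code below tries to let the inlined
   body of make construct its result directly in b instead of going
   through a fresh temporary that would then be copied over.  */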
3645 static tree
3646 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3647 basic_block entry_bb)
3649 tree callee = id->src_fn;
3650 tree result = DECL_RESULT (callee);
3651 tree callee_type = TREE_TYPE (result);
3652 tree caller_type;
3653 tree var, use;
3655 /* Handle type-mismatches in the function declaration return type
3656 vs. the call expression. */
3657 if (modify_dest)
3658 caller_type = TREE_TYPE (modify_dest);
3659 else if (return_slot)
3660 caller_type = TREE_TYPE (return_slot);
3661 else /* No LHS on the call. */
3662 caller_type = TREE_TYPE (TREE_TYPE (callee));
3664 /* We don't need to do anything for functions that don't return anything. */
3665 if (VOID_TYPE_P (callee_type))
3666 return NULL_TREE;
3668 /* If there was a return slot, then the return value is the
3669 dereferenced address of that object. */
3670 if (return_slot)
3672 /* The front end shouldn't have used both return_slot and
3673 a modify expression. */
3674 gcc_assert (!modify_dest);
3675 if (DECL_BY_REFERENCE (result))
3677 tree return_slot_addr = build_fold_addr_expr (return_slot);
3678 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3680 /* We are going to construct *&return_slot and we can't do that
3681 for variables that are believed not to be addressable.
3683 FIXME: This check can possibly trigger, because values returned
3684 via the return slot optimization are not believed to have their
3685 address taken by alias analysis. */
3686 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3687 var = return_slot_addr;
3688 mark_addressable (return_slot);
3690 else
3692 var = return_slot;
3693 gcc_assert (TREE_CODE (var) != SSA_NAME);
3694 if (TREE_ADDRESSABLE (result))
3695 mark_addressable (var);
3697 if (DECL_NOT_GIMPLE_REG_P (result)
3698 && DECL_P (var))
3699 DECL_NOT_GIMPLE_REG_P (var) = 1;
3701 if (!useless_type_conversion_p (callee_type, caller_type))
3702 var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3704 use = NULL;
3705 goto done;
3708 /* All types requiring non-trivial constructors should have been handled. */
3709 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3711 /* Attempt to avoid creating a new temporary variable. */
3712 if (modify_dest
3713 && TREE_CODE (modify_dest) != SSA_NAME)
3715 bool use_it = false;
3717 /* We can't use MODIFY_DEST if there's type promotion involved. */
3718 if (!useless_type_conversion_p (callee_type, caller_type))
3719 use_it = false;
3721 /* ??? If we're assigning to a variable sized type, then we must
3722 reuse the destination variable, because we've no good way to
3723 create variable sized temporaries at this point. */
3724 else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3725 use_it = true;
3727 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3728 reuse it as the result of the call directly. Don't do this if
3729 it would promote MODIFY_DEST to addressable. */
3730 else if (TREE_ADDRESSABLE (result))
3731 use_it = false;
3732 else
3734 tree base_m = get_base_address (modify_dest);
3736 /* If the base isn't a decl, then it's a pointer, and we don't
3737 know where that's going to go. */
3738 if (!DECL_P (base_m))
3739 use_it = false;
3740 else if (is_global_var (base_m))
3741 use_it = false;
3742 else if (DECL_NOT_GIMPLE_REG_P (result)
3743 && !DECL_NOT_GIMPLE_REG_P (base_m))
3744 use_it = false;
3745 else if (!TREE_ADDRESSABLE (base_m))
3746 use_it = true;
3749 if (use_it)
3751 var = modify_dest;
3752 use = NULL;
3753 goto done;
3757 gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3759 var = copy_result_decl_to_var (result, id);
3760 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3762 /* Do not have the rest of GCC warn about this variable as it should
3763 not be visible to the user. */
3764 TREE_NO_WARNING (var) = 1;
3766 declare_inline_vars (id->block, var);
3768 /* Build the use expr. If the return type of the function was
3769 promoted, convert it back to the expected type. */
3770 use = var;
3771 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3773 /* If we can match up types by promotion/demotion do so. */
3774 if (fold_convertible_p (caller_type, var))
3775 use = fold_convert (caller_type, var);
3776 else
3778 /* ??? For valid programs we should not end up here.
3779 Still if we end up with truly mismatched types here, fall back
3780 to using a MEM_REF to not leak invalid GIMPLE to the following
3781 passes. */
3782 /* Prevent var from being written into SSA form. */
3783 if (is_gimple_reg_type (TREE_TYPE (var)))
3784 DECL_NOT_GIMPLE_REG_P (var) = true;
3785 use = fold_build2 (MEM_REF, caller_type,
3786 build_fold_addr_expr (var),
3787 build_int_cst (ptr_type_node, 0));
3791 STRIP_USELESS_TYPE_CONVERSION (use);
3793 if (DECL_BY_REFERENCE (result))
3795 TREE_ADDRESSABLE (var) = 1;
3796 var = build_fold_addr_expr (var);
3799 done:
3800 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3801 way, when the RESULT_DECL is encountered, it will be
3802 automatically replaced by the VAR_DECL.
3804 When returning by reference, ensure that RESULT_DECL remaps to
3805 gimple_val. */
3806 if (DECL_BY_REFERENCE (result)
3807 && !is_gimple_val (var))
3809 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3810 insert_decl_map (id, result, temp);
3811 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3812 its default_def SSA_NAME. */
3813 if (gimple_in_ssa_p (id->src_cfun)
3814 && is_gimple_reg (result))
3816 temp = make_ssa_name (temp);
3817 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3819 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3821 else
3822 insert_decl_map (id, result, var);
3824 /* Remember this so we can ignore it in remap_decls. */
3825 id->retvar = var;
3826 return use;
3829 /* Determine if the function can be copied. If so return NULL. If
3830 not return a string describing the reason for failure. */
3832 const char *
3833 copy_forbidden (struct function *fun)
3835 const char *reason = fun->cannot_be_copied_reason;
3837 /* Only examine the function once. */
3838 if (fun->cannot_be_copied_set)
3839 return reason;
3841 /* We cannot copy a function that receives a non-local goto
3842 because we cannot remap the destination label used in the
3843 function that is performing the non-local goto. */
3844 /* ??? Actually, this should be possible, if we work at it.
3845 No doubt there's just a handful of places that simply
3846 assume it doesn't happen and don't substitute properly. */
3847 if (fun->has_nonlocal_label)
3849 reason = G_("function %q+F can never be copied "
3850 "because it receives a non-local goto");
3851 goto fail;
3854 if (fun->has_forced_label_in_static)
3856 reason = G_("function %q+F can never be copied because it saves "
3857 "address of local label in a static variable");
3858 goto fail;
3861 fail:
3862 fun->cannot_be_copied_reason = reason;
3863 fun->cannot_be_copied_set = true;
3864 return reason;
3868 static const char *inline_forbidden_reason;
3870 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3871 iff a function cannot be inlined. Also sets the reason why. */
3873 static tree
3874 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3875 struct walk_stmt_info *wip)
3877 tree fn = (tree) wip->info;
3878 tree t;
3879 gimple *stmt = gsi_stmt (*gsi);
3881 switch (gimple_code (stmt))
3883 case GIMPLE_CALL:
3884 /* Refuse to inline an alloca call unless the user explicitly forced it, as
3885 this may change the program's memory overhead drastically when the
3886 function using alloca is called in a loop. In the GCC present in
3887 SPEC2000, inlining into schedule_block caused it to require 2GB of
3888 RAM instead of 256MB. Don't do so for alloca calls emitted for
3889 VLA objects, as those can't cause unbounded growth (they're always
3890 wrapped inside stack_save/stack_restore regions). */
3891 if (gimple_maybe_alloca_call_p (stmt)
3892 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3893 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3895 inline_forbidden_reason
3896 = G_("function %q+F can never be inlined because it uses "
3897 "alloca (override using the always_inline attribute)");
3898 *handled_ops_p = true;
3899 return fn;
3902 t = gimple_call_fndecl (stmt);
3903 if (t == NULL_TREE)
3904 break;
3906 /* We cannot inline functions that call setjmp. */
3907 if (setjmp_call_p (t))
3909 inline_forbidden_reason
3910 = G_("function %q+F can never be inlined because it uses setjmp");
3911 *handled_ops_p = true;
3912 return t;
3915 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3916 switch (DECL_FUNCTION_CODE (t))
3918 /* We cannot inline functions that take a variable number of
3919 arguments. */
3920 case BUILT_IN_VA_START:
3921 case BUILT_IN_NEXT_ARG:
3922 case BUILT_IN_VA_END:
3923 inline_forbidden_reason
3924 = G_("function %q+F can never be inlined because it "
3925 "uses variable argument lists");
3926 *handled_ops_p = true;
3927 return t;
3929 case BUILT_IN_LONGJMP:
3930 /* We can't inline functions that call __builtin_longjmp at
3931 all. The non-local goto machinery really requires the
3932 destination be in a different function. If we allow the
3933 function calling __builtin_longjmp to be inlined into the
3934 function calling __builtin_setjmp, Things will Go Awry. */
3935 inline_forbidden_reason
3936 = G_("function %q+F can never be inlined because "
3937 "it uses setjmp-longjmp exception handling");
3938 *handled_ops_p = true;
3939 return t;
3941 case BUILT_IN_NONLOCAL_GOTO:
3942 /* Similarly. */
3943 inline_forbidden_reason
3944 = G_("function %q+F can never be inlined because "
3945 "it uses non-local goto");
3946 *handled_ops_p = true;
3947 return t;
3949 case BUILT_IN_RETURN:
3950 case BUILT_IN_APPLY_ARGS:
3951 /* If a __builtin_apply_args caller would be inlined,
3952 it would be saving arguments of the function it has
3953 been inlined into. Similarly __builtin_return would
3954 return from the function the inline has been inlined into. */
3955 inline_forbidden_reason
3956 = G_("function %q+F can never be inlined because "
3957 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3958 *handled_ops_p = true;
3959 return t;
3961 default:
3962 break;
3964 break;
3966 case GIMPLE_GOTO:
3967 t = gimple_goto_dest (stmt);
3969 /* We will not inline a function which uses computed goto. The
3970 addresses of its local labels, which may be tucked into
3971 global storage, are of course not constant across
3972 instantiations, which causes unexpected behavior. */
3973 if (TREE_CODE (t) != LABEL_DECL)
3975 inline_forbidden_reason
3976 = G_("function %q+F can never be inlined "
3977 "because it contains a computed goto");
3978 *handled_ops_p = true;
3979 return t;
3981 break;
3983 default:
3984 break;
3987 *handled_ops_p = false;
3988 return NULL_TREE;
3991 /* Return true if FNDECL is a function that cannot be inlined into
3992 another one. */
3994 static bool
3995 inline_forbidden_p (tree fndecl)
3997 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3998 struct walk_stmt_info wi;
3999 basic_block bb;
4000 bool forbidden_p = false;
4002 /* First check for shared reasons not to copy the code. */
4003 inline_forbidden_reason = copy_forbidden (fun);
4004 if (inline_forbidden_reason != NULL)
4005 return true;
4007 /* Next, walk the statements of the function looking for
4008 constructs we can't handle, or that are non-optimal for inlining. */
4009 hash_set<tree> visited_nodes;
4010 memset (&wi, 0, sizeof (wi));
4011 wi.info = (void *) fndecl;
4012 wi.pset = &visited_nodes;
4014 FOR_EACH_BB_FN (bb, fun)
4016 gimple *ret;
4017 gimple_seq seq = bb_seq (bb);
4018 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
4019 forbidden_p = (ret != NULL);
4020 if (forbidden_p)
4021 break;
4024 return forbidden_p;
4027 /* Return false if the function FNDECL cannot be inlined on account of its
4028 attributes, true otherwise. */
4029 static bool
4030 function_attribute_inlinable_p (const_tree fndecl)
4032 if (targetm.attribute_table)
4034 const_tree a;
4036 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
4038 const_tree name = get_attribute_name (a);
4039 int i;
4041 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
4042 if (is_attribute_p (targetm.attribute_table[i].name, name))
4043 return targetm.function_attribute_inlinable_p (fndecl);
4047 return true;
4050 /* Returns nonzero if FN is a function that does not have any
4051 fundamental inline blocking properties. */
4053 bool
4054 tree_inlinable_function_p (tree fn)
4056 bool inlinable = true;
4057 bool do_warning;
4058 tree always_inline;
4060 /* If we've already decided this function shouldn't be inlined,
4061 there's no need to check again. */
4062 if (DECL_UNINLINABLE (fn))
4063 return false;
4065 /* We only warn for functions declared `inline' by the user. */
4066 do_warning = (opt_for_fn (fn, warn_inline)
4067 && DECL_DECLARED_INLINE_P (fn)
4068 && !DECL_NO_INLINE_WARNING_P (fn)
4069 && !DECL_IN_SYSTEM_HEADER (fn));
4071 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4073 if (flag_no_inline
4074 && always_inline == NULL)
4076 if (do_warning)
4077 warning (OPT_Winline, "function %q+F can never be inlined because it "
4078 "is suppressed using %<-fno-inline%>", fn);
4079 inlinable = false;
4082 else if (!function_attribute_inlinable_p (fn))
4084 if (do_warning)
4085 warning (OPT_Winline, "function %q+F can never be inlined because it "
4086 "uses attributes conflicting with inlining", fn);
4087 inlinable = false;
4090 else if (inline_forbidden_p (fn))
4092 /* See if we should warn about uninlinable functions. Previously,
4093 some of these warnings would be issued while trying to expand
4094 the function inline, but that would cause multiple warnings
4095 about functions that would for example call alloca. But since
4096 this a property of the function, just one warning is enough.
4097 As a bonus we can now give more details about the reason why a
4098 function is not inlinable. */
4099 if (always_inline)
4100 error (inline_forbidden_reason, fn);
4101 else if (do_warning)
4102 warning (OPT_Winline, inline_forbidden_reason, fn);
4104 inlinable = false;
4107 /* Squirrel away the result so that we don't have to check again. */
4108 DECL_UNINLINABLE (fn) = !inlinable;
4110 return inlinable;
4113 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
4114 word size, take a possible memcpy call into account, and return the
4115 cost based on whether we optimize for size or speed according to SPEED_P. */
4118 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4120 HOST_WIDE_INT size;
4122 gcc_assert (!VOID_TYPE_P (type));
4124 if (TREE_CODE (type) == VECTOR_TYPE)
4126 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4127 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4128 int orig_mode_size
4129 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4130 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4131 return ((orig_mode_size + simd_mode_size - 1)
4132 / simd_mode_size);
4135 size = int_size_in_bytes (type);
4137 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4138 /* Cost of a memcpy call, 3 arguments and the call. */
4139 return 4;
4140 else
4141 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
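/* Worked example (illustrative; the constants are target-dependent):
   with MOVE_MAX_PIECES == 8 and MOVE_RATIO (speed_p) == 4, a 24-byte
   structure costs (24 + 8 - 1) / 8 == 3, while anything larger than
   8 * 4 == 32 bytes is assumed to turn into a memcpy call and costs 4.  */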
4144 /* Returns cost of operation CODE, according to WEIGHTS */
4146 static int
4147 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4148 tree op1 ATTRIBUTE_UNUSED, tree op2)
4150 switch (code)
4152 /* These are "free" conversions, or their presumed cost
4153 is folded into other operations. */
4154 case RANGE_EXPR:
4155 CASE_CONVERT:
4156 case COMPLEX_EXPR:
4157 case PAREN_EXPR:
4158 case VIEW_CONVERT_EXPR:
4159 return 0;
4161 /* Assign cost of 1 to usual operations.
4162 ??? We may consider mapping RTL costs to this. */
4163 case COND_EXPR:
4164 case VEC_COND_EXPR:
4165 case VEC_PERM_EXPR:
4167 case PLUS_EXPR:
4168 case POINTER_PLUS_EXPR:
4169 case POINTER_DIFF_EXPR:
4170 case MINUS_EXPR:
4171 case MULT_EXPR:
4172 case MULT_HIGHPART_EXPR:
4174 case ADDR_SPACE_CONVERT_EXPR:
4175 case FIXED_CONVERT_EXPR:
4176 case FIX_TRUNC_EXPR:
4178 case NEGATE_EXPR:
4179 case FLOAT_EXPR:
4180 case MIN_EXPR:
4181 case MAX_EXPR:
4182 case ABS_EXPR:
4183 case ABSU_EXPR:
4185 case LSHIFT_EXPR:
4186 case RSHIFT_EXPR:
4187 case LROTATE_EXPR:
4188 case RROTATE_EXPR:
4190 case BIT_IOR_EXPR:
4191 case BIT_XOR_EXPR:
4192 case BIT_AND_EXPR:
4193 case BIT_NOT_EXPR:
4195 case TRUTH_ANDIF_EXPR:
4196 case TRUTH_ORIF_EXPR:
4197 case TRUTH_AND_EXPR:
4198 case TRUTH_OR_EXPR:
4199 case TRUTH_XOR_EXPR:
4200 case TRUTH_NOT_EXPR:
4202 case LT_EXPR:
4203 case LE_EXPR:
4204 case GT_EXPR:
4205 case GE_EXPR:
4206 case EQ_EXPR:
4207 case NE_EXPR:
4208 case ORDERED_EXPR:
4209 case UNORDERED_EXPR:
4211 case UNLT_EXPR:
4212 case UNLE_EXPR:
4213 case UNGT_EXPR:
4214 case UNGE_EXPR:
4215 case UNEQ_EXPR:
4216 case LTGT_EXPR:
4218 case CONJ_EXPR:
4220 case PREDECREMENT_EXPR:
4221 case PREINCREMENT_EXPR:
4222 case POSTDECREMENT_EXPR:
4223 case POSTINCREMENT_EXPR:
4225 case REALIGN_LOAD_EXPR:
4227 case WIDEN_PLUS_EXPR:
4228 case WIDEN_MINUS_EXPR:
4229 case WIDEN_SUM_EXPR:
4230 case WIDEN_MULT_EXPR:
4231 case DOT_PROD_EXPR:
4232 case SAD_EXPR:
4233 case WIDEN_MULT_PLUS_EXPR:
4234 case WIDEN_MULT_MINUS_EXPR:
4235 case WIDEN_LSHIFT_EXPR:
4237 case VEC_WIDEN_PLUS_HI_EXPR:
4238 case VEC_WIDEN_PLUS_LO_EXPR:
4239 case VEC_WIDEN_MINUS_HI_EXPR:
4240 case VEC_WIDEN_MINUS_LO_EXPR:
4241 case VEC_WIDEN_MULT_HI_EXPR:
4242 case VEC_WIDEN_MULT_LO_EXPR:
4243 case VEC_WIDEN_MULT_EVEN_EXPR:
4244 case VEC_WIDEN_MULT_ODD_EXPR:
4245 case VEC_UNPACK_HI_EXPR:
4246 case VEC_UNPACK_LO_EXPR:
4247 case VEC_UNPACK_FLOAT_HI_EXPR:
4248 case VEC_UNPACK_FLOAT_LO_EXPR:
4249 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4250 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4251 case VEC_PACK_TRUNC_EXPR:
4252 case VEC_PACK_SAT_EXPR:
4253 case VEC_PACK_FIX_TRUNC_EXPR:
4254 case VEC_PACK_FLOAT_EXPR:
4255 case VEC_WIDEN_LSHIFT_HI_EXPR:
4256 case VEC_WIDEN_LSHIFT_LO_EXPR:
4257 case VEC_DUPLICATE_EXPR:
4258 case VEC_SERIES_EXPR:
4260 return 1;
4262 /* A few special cases of expensive operations. This is useful
4263 for avoiding inlining of functions that have too many of these. */
4264 case TRUNC_DIV_EXPR:
4265 case CEIL_DIV_EXPR:
4266 case FLOOR_DIV_EXPR:
4267 case ROUND_DIV_EXPR:
4268 case EXACT_DIV_EXPR:
4269 case TRUNC_MOD_EXPR:
4270 case CEIL_MOD_EXPR:
4271 case FLOOR_MOD_EXPR:
4272 case ROUND_MOD_EXPR:
4273 case RDIV_EXPR:
4274 if (TREE_CODE (op2) != INTEGER_CST)
4275 return weights->div_mod_cost;
4276 return 1;
4278 /* Bit-field insertion needs several shift and mask operations. */
4279 case BIT_INSERT_EXPR:
4280 return 3;
4282 default:
4283 /* We expect a copy assignment with no operator. */
4284 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4285 return 0;
4290 /* Estimate number of instructions that will be created by expanding
4291 the statements in the statement sequence STMTS.
4292 WEIGHTS contains weights attributed to various constructs. */
4295 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4297 int cost;
4298 gimple_stmt_iterator gsi;
4300 cost = 0;
4301 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4302 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4304 return cost;
4308 /* Estimate number of instructions that will be created by expanding STMT.
4309 WEIGHTS contains weights attributed to various constructs. */
4312 estimate_num_insns (gimple *stmt, eni_weights *weights)
4314 unsigned cost, i;
4315 enum gimple_code code = gimple_code (stmt);
4316 tree lhs;
4317 tree rhs;
4319 switch (code)
4321 case GIMPLE_ASSIGN:
4322 /* Try to estimate the cost of assignments. We have two cases to
4323 deal with:
4324 1) Simple assignments to registers;
4325 2) Stores to things that must live in memory. This includes
4326 "normal" stores to scalars, but also assignments of large
4327 structures, or constructors of big arrays;
4329 Let us look at both cases, assuming we have "a = b + C":
4330 <GIMPLE_ASSIGN <var_decl "a">
4331 <plus_expr <var_decl "b"> <constant C>>
4332 If "a" is a GIMPLE register, the assignment to it is free on almost
4333 any target, because "a" usually ends up in a real register. Hence
4334 the only cost of this expression comes from the PLUS_EXPR, and we
4335 can ignore the GIMPLE_ASSIGN.
4336 If "a" is not a GIMPLE register, the assignment to "a" will most
4337 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4338 of moving something into "a", which we compute using the function
4339 estimate_move_cost. */
4340 if (gimple_clobber_p (stmt))
4341 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4343 lhs = gimple_assign_lhs (stmt);
4344 rhs = gimple_assign_rhs1 (stmt);
4346 cost = 0;
4348 /* Account for the cost of moving to / from memory. */
4349 if (gimple_store_p (stmt))
4350 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4351 if (gimple_assign_load_p (stmt))
4352 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4354 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4355 gimple_assign_rhs1 (stmt),
4356 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4357 == GIMPLE_BINARY_RHS
4358 ? gimple_assign_rhs2 (stmt) : NULL);
4359 break;
4361 case GIMPLE_COND:
4362 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4363 gimple_op (stmt, 0),
4364 gimple_op (stmt, 1));
4365 break;
4367 case GIMPLE_SWITCH:
4369 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4370 /* Take into account cost of the switch + guess 2 conditional jumps for
4371 each case label.
4373 TODO: once the switch expansion logic is sufficiently separated, we can
4374 do a better job of estimating the cost of the switch. */
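/* For example (illustrative): a switch with 16 labels (the default
   included) is estimated at floor_log2 (16) * 2 == 8 when optimizing
   for speed, but at 16 * 2 == 32 when optimizing for size.  */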
4375 if (weights->time_based)
4376 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4377 else
4378 cost = gimple_switch_num_labels (switch_stmt) * 2;
4380 break;
4382 case GIMPLE_CALL:
4384 tree decl;
4386 if (gimple_call_internal_p (stmt))
4387 return 0;
4388 else if ((decl = gimple_call_fndecl (stmt))
4389 && fndecl_built_in_p (decl))
4391 /* Do not special case builtins where we see the body.
4392 This just confuses the inliner. */
4393 struct cgraph_node *node;
4394 if (!(node = cgraph_node::get (decl))
4395 || node->definition)
4397 /* For builtins that are likely expanded to nothing or
4398 inlined, do not account operand costs. */
4399 else if (is_simple_builtin (decl))
4400 return 0;
4401 else if (is_inexpensive_builtin (decl))
4402 return weights->target_builtin_call_cost;
4403 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4405 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4406 specialize the cheap expansion we do here.
4407 ??? This asks for a more general solution. */
4408 switch (DECL_FUNCTION_CODE (decl))
4410 case BUILT_IN_POW:
4411 case BUILT_IN_POWF:
4412 case BUILT_IN_POWL:
4413 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4414 && (real_equal
4415 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4416 &dconst2)))
4417 return estimate_operator_cost
4418 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4419 gimple_call_arg (stmt, 0));
4420 break;
4422 default:
4423 break;
4428 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4429 if (gimple_call_lhs (stmt))
4430 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4431 weights->time_based);
4432 for (i = 0; i < gimple_call_num_args (stmt); i++)
4434 tree arg = gimple_call_arg (stmt, i);
4435 cost += estimate_move_cost (TREE_TYPE (arg),
4436 weights->time_based);
4438 break;
4441 case GIMPLE_RETURN:
4442 return weights->return_cost;
4444 case GIMPLE_GOTO:
4445 case GIMPLE_LABEL:
4446 case GIMPLE_NOP:
4447 case GIMPLE_PHI:
4448 case GIMPLE_PREDICT:
4449 case GIMPLE_DEBUG:
4450 return 0;
4452 case GIMPLE_ASM:
4454 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4455 /* 1000 means infinity. This avoids overflows later
4456 with very long asm statements. */
4457 if (count > 1000)
4458 count = 1000;
4459 /* If this asm is asm inline, count anything as minimum size. */
4460 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4461 count = MIN (1, count);
4462 return MAX (1, count);
4465 case GIMPLE_RESX:
4466 /* This is either going to be an external function call with one
4467 argument, or two register copy statements plus a goto. */
4468 return 2;
4470 case GIMPLE_EH_DISPATCH:
4471 /* ??? This is going to turn into a switch statement. Ideally
4472 we'd have a look at the eh region and estimate the number of
4473 edges involved. */
4474 return 10;
4476 case GIMPLE_BIND:
4477 return estimate_num_insns_seq (
4478 gimple_bind_body (as_a <gbind *> (stmt)),
4479 weights);
4481 case GIMPLE_EH_FILTER:
4482 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4484 case GIMPLE_CATCH:
4485 return estimate_num_insns_seq (gimple_catch_handler (
4486 as_a <gcatch *> (stmt)),
4487 weights);
4489 case GIMPLE_TRY:
4490 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4491 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4493 /* OMP directives are generally very expensive. */
4495 case GIMPLE_OMP_RETURN:
4496 case GIMPLE_OMP_SECTIONS_SWITCH:
4497 case GIMPLE_OMP_ATOMIC_STORE:
4498 case GIMPLE_OMP_CONTINUE:
4499 /* ...except these, which are cheap. */
4500 return 0;
4502 case GIMPLE_OMP_ATOMIC_LOAD:
4503 return weights->omp_cost;
4505 case GIMPLE_OMP_FOR:
4506 return (weights->omp_cost
4507 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4508 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4510 case GIMPLE_OMP_PARALLEL:
4511 case GIMPLE_OMP_TASK:
4512 case GIMPLE_OMP_CRITICAL:
4513 case GIMPLE_OMP_MASTER:
4514 case GIMPLE_OMP_TASKGROUP:
4515 case GIMPLE_OMP_ORDERED:
4516 case GIMPLE_OMP_SCAN:
4517 case GIMPLE_OMP_SECTION:
4518 case GIMPLE_OMP_SECTIONS:
4519 case GIMPLE_OMP_SINGLE:
4520 case GIMPLE_OMP_TARGET:
4521 case GIMPLE_OMP_TEAMS:
4522 return (weights->omp_cost
4523 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4525 case GIMPLE_TRANSACTION:
4526 return (weights->tm_cost
4527 + estimate_num_insns_seq (gimple_transaction_body (
4528 as_a <gtransaction *> (stmt)),
4529 weights));
4531 default:
4532 gcc_unreachable ();
4535 return cost;
4538 /* Estimate number of instructions that will be created by expanding
4539 function FNDECL. WEIGHTS contains weights attributed to various
4540 constructs. */
4543 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4545 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4546 gimple_stmt_iterator bsi;
4547 basic_block bb;
4548 int n = 0;
4550 gcc_assert (my_function && my_function->cfg);
4551 FOR_EACH_BB_FN (bb, my_function)
4553 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4554 n += estimate_num_insns (gsi_stmt (bsi), weights);
4557 return n;
4561 /* Initializes weights used by estimate_num_insns. */
4563 void
4564 init_inline_once (void)
4566 eni_size_weights.call_cost = 1;
4567 eni_size_weights.indirect_call_cost = 3;
4568 eni_size_weights.target_builtin_call_cost = 1;
4569 eni_size_weights.div_mod_cost = 1;
4570 eni_size_weights.omp_cost = 40;
4571 eni_size_weights.tm_cost = 10;
4572 eni_size_weights.time_based = false;
4573 eni_size_weights.return_cost = 1;
4575 /* Estimating time for a call is difficult, since we have no idea what the
4576 called function does. In the current uses of eni_time_weights,
4577 underestimating the cost does less harm than overestimating it, so
4578 we choose a rather small value here. */
4579 eni_time_weights.call_cost = 10;
4580 eni_time_weights.indirect_call_cost = 15;
4581 eni_time_weights.target_builtin_call_cost = 1;
4582 eni_time_weights.div_mod_cost = 10;
4583 eni_time_weights.omp_cost = 40;
4584 eni_time_weights.tm_cost = 40;
4585 eni_time_weights.time_based = true;
4586 eni_time_weights.return_cost = 2;
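/* Illustration (not part of the original source; estimate_move_cost
   is target-dependent): under these weights a direct call
   r = f (a, b) with register-sized operands is typically estimated
   at 1 + 1 + 2 == 4 insns for size and 10 + 1 + 2 == 13 for time,
   assuming each operand move costs 1 on the target.  */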
4590 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4592 static void
4593 prepend_lexical_block (tree current_block, tree new_block)
4595 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4596 BLOCK_SUBBLOCKS (current_block) = new_block;
4597 BLOCK_SUPERCONTEXT (new_block) = current_block;
4600 /* Add local variables from CALLEE to CALLER. */
4602 static inline void
4603 add_local_variables (struct function *callee, struct function *caller,
4604 copy_body_data *id)
4606 tree var;
4607 unsigned ix;
4609 FOR_EACH_LOCAL_DECL (callee, ix, var)
4610 if (!can_be_nonlocal (var, id))
4612 tree new_var = remap_decl (var, id);
4614 /* Remap debug-expressions. */
4615 if (VAR_P (new_var)
4616 && DECL_HAS_DEBUG_EXPR_P (var)
4617 && new_var != var)
4619 tree tem = DECL_DEBUG_EXPR (var);
4620 bool old_regimplify = id->regimplify;
4621 id->remapping_type_depth++;
4622 walk_tree (&tem, copy_tree_body_r, id, NULL);
4623 id->remapping_type_depth--;
4624 id->regimplify = old_regimplify;
4625 SET_DECL_DEBUG_EXPR (new_var, tem);
4626 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4628 add_local_decl (caller, new_var);
4632 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4633 have brought in or introduced any debug stmts for SRCVAR. */
4635 static inline void
4636 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4638 tree *remappedvarp = id->decl_map->get (srcvar);
4640 if (!remappedvarp)
4641 return;
4643 if (!VAR_P (*remappedvarp))
4644 return;
4646 if (*remappedvarp == id->retvar)
4647 return;
4649 tree tvar = target_for_debug_bind (*remappedvarp);
4650 if (!tvar)
4651 return;
4653 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4654 id->call_stmt);
4655 gimple_seq_add_stmt (bindings, stmt);
4658 /* For each inlined variable for which we may have debug bind stmts,
4659 add before GSI a final debug stmt resetting it, marking the end of
4660 its life, so that var-tracking knows it doesn't have to compute
4661 further locations for it. */
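/* Conceptually (illustrative), each emitted reset appears in GIMPLE
   dumps as something like
       # DEBUG remapped_var => NULL
   telling var-tracking that the variable's location is unknown from
   this point on.  */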
4663 static inline void
4664 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4666 tree var;
4667 unsigned ix;
4668 gimple_seq bindings = NULL;
4670 if (!gimple_in_ssa_p (id->src_cfun))
4671 return;
4673 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4674 return;
4676 for (var = DECL_ARGUMENTS (id->src_fn);
4677 var; var = DECL_CHAIN (var))
4678 reset_debug_binding (id, var, &bindings);
4680 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4681 reset_debug_binding (id, var, &bindings);
4683 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4686 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
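/* In outline (illustrative summary, not part of the original source):
   expand_call_inline splits BB before the call, initializes the
   inlined parameters, copies the callee body between the two block
   halves, and finally replaces the call with an assignment from the
   return variable (or simply removes it).  */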
4688 static bool
4689 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4690 bitmap to_purge)
4692 tree use_retvar;
4693 tree fn;
4694 hash_map<tree, tree> *dst;
4695 hash_map<tree, tree> *st = NULL;
4696 tree return_slot;
4697 tree modify_dest;
4698 struct cgraph_edge *cg_edge;
4699 cgraph_inline_failed_t reason;
4700 basic_block return_block;
4701 edge e;
4702 gimple_stmt_iterator gsi, stmt_gsi;
4703 bool successfully_inlined = false;
4704 bool purge_dead_abnormal_edges;
4705 gcall *call_stmt;
4706 unsigned int prop_mask, src_properties;
4707 struct function *dst_cfun;
4708 tree simduid;
4709 use_operand_p use;
4710 gimple *simtenter_stmt = NULL;
4711 vec<tree> *simtvars_save;
4712 clone_info *info;
4714 /* The gimplifier uses input_location in too many places, such as
4715 internal_get_tmp_var (). */
4716 location_t saved_location = input_location;
4717 input_location = gimple_location (stmt);
4719 /* From here on, we're only interested in CALL_EXPRs. */
4720 call_stmt = dyn_cast <gcall *> (stmt);
4721 if (!call_stmt)
4722 goto egress;
4724 cg_edge = id->dst_node->get_edge (stmt);
4725 gcc_checking_assert (cg_edge);
4726 /* First, see if we can figure out what function is being called.
4727 If we cannot, then there is no hope of inlining the function. */
4728 if (cg_edge->indirect_unknown_callee)
4729 goto egress;
4730 fn = cg_edge->callee->decl;
4731 gcc_checking_assert (fn);
4733 /* If FN is a declaration of a function in a nested scope that was
4734 globally declared inline, we don't set its DECL_INITIAL.
4735 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4736 C++ front-end uses it for cdtors to refer to their internal
4737 declarations, which are not real functions. Fortunately those
4738 don't have trees to be saved, so we can tell by checking their
4739 gimple_body. */
4740 if (!DECL_INITIAL (fn)
4741 && DECL_ABSTRACT_ORIGIN (fn)
4742 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4743 fn = DECL_ABSTRACT_ORIGIN (fn);
4745 /* Don't try to inline functions that are not well-suited to inlining. */
4746 if (cg_edge->inline_failed)
4748 reason = cg_edge->inline_failed;
4749 /* If this call was originally indirect, we do not want to emit any
4750 inlining related warnings or sorry messages because there are no
4751 guarantees regarding those. */
4752 if (cg_edge->indirect_inlining_edge)
4753 goto egress;
4755 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4756 /* For extern inline functions that get redefined we always
4757 silently ignore the always_inline flag. Better behavior would
4758 be to be able to keep both bodies and use extern inline body
4759 for inlining, but we can't do that because frontends overwrite
4760 the body. */
4761 && !cg_edge->callee->redefined_extern_inline
4762 /* During early inline pass, report only when optimization is
4763 not turned on. */
4764 && (symtab->global_info_ready
4765 || !optimize
4766 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4767 /* PR 20090218-1_0.c. Body can be provided by another module. */
4768 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4770 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4771 cgraph_inline_failed_string (reason));
4772 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4773 inform (gimple_location (stmt), "called from here");
4774 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4775 inform (DECL_SOURCE_LOCATION (cfun->decl),
4776 "called from this function");
4778 else if (opt_for_fn (fn, warn_inline)
4779 && DECL_DECLARED_INLINE_P (fn)
4780 && !DECL_NO_INLINE_WARNING_P (fn)
4781 && !DECL_IN_SYSTEM_HEADER (fn)
4782 && reason != CIF_UNSPECIFIED
4783 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4784 /* Do not warn about not inlined recursive calls. */
4785 && !cg_edge->recursive_p ()
4786 /* Avoid warnings during early inline pass. */
4787 && symtab->global_info_ready)
4789 auto_diagnostic_group d;
4790 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4791 fn, _(cgraph_inline_failed_string (reason))))
4793 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4794 inform (gimple_location (stmt), "called from here");
4795 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4796 inform (DECL_SOURCE_LOCATION (cfun->decl),
4797 "called from this function");
4800 goto egress;
4802 id->src_node = cg_edge->callee;
4804 /* If callee is thunk, all we need is to adjust the THIS pointer
4805 and redirect to function being thunked. */
4806 if (id->src_node->thunk)
4808 cgraph_edge *edge;
4809 tree virtual_offset = NULL;
4810 profile_count count = cg_edge->count;
4811 tree op;
4812 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4813 thunk_info *info = thunk_info::get (id->src_node);
4815 cgraph_edge::remove (cg_edge);
4816 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4817 gimple_uid (stmt),
4818 profile_count::one (),
4819 profile_count::one (),
4820 true);
4821 edge->count = count;
4822 if (info->virtual_offset_p)
4823 virtual_offset = size_int (info->virtual_value);
4824 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4825 NULL);
4826 gsi_insert_before (&iter, gimple_build_assign (op,
4827 gimple_call_arg (stmt, 0)),
4828 GSI_NEW_STMT);
4829 gcc_assert (info->this_adjusting);
4830 op = thunk_adjust (&iter, op, 1, info->fixed_offset,
4831 virtual_offset, info->indirect_offset);
4833 gimple_call_set_arg (stmt, 0, op);
4834 gimple_call_set_fndecl (stmt, edge->callee->decl);
4835 update_stmt (stmt);
4836 id->src_node->remove ();
4837 expand_call_inline (bb, stmt, id, to_purge);
4838 maybe_remove_unused_call_args (cfun, stmt);
4839 return true;
4841 fn = cg_edge->callee->decl;
4842 cg_edge->callee->get_untransformed_body ();
4844 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4845 cg_edge->callee->verify ();
4847 /* We will be inlining this callee. */
4848 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4850 /* Update the callers EH personality. */
4851 if (DECL_FUNCTION_PERSONALITY (fn))
4852 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4853 = DECL_FUNCTION_PERSONALITY (fn);
4855 /* Split the block before the GIMPLE_CALL. */
4856 stmt_gsi = gsi_for_stmt (stmt);
4857 gsi_prev (&stmt_gsi);
4858 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4859 bb = e->src;
4860 return_block = e->dest;
4861 remove_edge (e);
4863 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4864 been the source of abnormal edges. In this case, schedule
4865 the removal of dead abnormal edges. */
4866 gsi = gsi_start_bb (return_block);
4867 gsi_next (&gsi);
4868 purge_dead_abnormal_edges = gsi_end_p (gsi);
4870 stmt_gsi = gsi_start_bb (return_block);
4872 /* Build a block containing code to initialize the arguments, the
4873 actual inline expansion of the body, and a label for the return
4874 statements within the function to jump to. The type of the
4875 statement expression is the return type of the function call.
4876 ??? If the call does not have an associated block then we will
4877 remap all callee blocks to NULL, effectively dropping most of
4878 its debug information. This should only happen for calls to
4879 artificial decls inserted by the compiler itself. We need to
4880 either link the inlined blocks into the caller block tree or
4881 not refer to them in any way to not break GC for locations. */
4882 if (tree block = gimple_block (stmt))
4884 /* We do want to assign a non-UNKNOWN_LOCATION BLOCK_SOURCE_LOCATION
4885 to make inlined_function_outer_scope_p return true on this BLOCK. */
4886 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4887 if (loc == UNKNOWN_LOCATION)
4888 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4889 if (loc == UNKNOWN_LOCATION)
4890 loc = BUILTINS_LOCATION;
4891 id->block = make_node (BLOCK);
4892 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4893 BLOCK_SOURCE_LOCATION (id->block) = loc;
4894 prepend_lexical_block (block, id->block);
4897 /* Local declarations will be replaced by their equivalents in this map. */
4898 st = id->decl_map;
4899 id->decl_map = new hash_map<tree, tree>;
4900 dst = id->debug_map;
4901 id->debug_map = NULL;
4902 if (flag_stack_reuse != SR_NONE)
4903 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4905 /* Record the function we are about to inline. */
4906 id->src_fn = fn;
4907 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4908 id->reset_location = DECL_IGNORED_P (fn);
4909 id->call_stmt = call_stmt;
4911 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4912 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4913 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4914 simtvars_save = id->dst_simt_vars;
4915 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4916 && (simduid = bb->loop_father->simduid) != NULL_TREE
4917 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4918 && single_imm_use (simduid, &use, &simtenter_stmt)
4919 && is_gimple_call (simtenter_stmt)
4920 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4921 vec_alloc (id->dst_simt_vars, 0);
4922 else
4923 id->dst_simt_vars = NULL;
4925 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4926 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4928 /* If the src function contains an IFN_VA_ARG, then so will the dst
4929 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4930 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4931 src_properties = id->src_cfun->curr_properties & prop_mask;
4932 if (src_properties != prop_mask)
4933 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4934 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4935 id->dst_node->calls_declare_variant_alt
4936 |= id->src_node->calls_declare_variant_alt;
4938 gcc_assert (!id->src_cfun->after_inlining);
4940 id->entry_bb = bb;
4941 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4943 gimple_stmt_iterator si = gsi_last_bb (bb);
4944 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4945 NOT_TAKEN),
4946 GSI_NEW_STMT);
4948 initialize_inlined_parameters (id, stmt, fn, bb);
4949 if (debug_nonbind_markers_p && debug_inline_points && id->block
4950 && inlined_function_outer_scope_p (id->block))
4952 gimple_stmt_iterator si = gsi_last_bb (bb);
4953 gsi_insert_after (&si, gimple_build_debug_inline_entry
4954 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4955 GSI_NEW_STMT);
4958 if (DECL_INITIAL (fn))
4960 if (gimple_block (stmt))
4962 tree *var;
4964 prepend_lexical_block (id->block,
4965 remap_blocks (DECL_INITIAL (fn), id));
4966 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4967 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4968 == NULL_TREE));
4969 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4970 otherwise, for DWARF, DW_TAG_formal_parameter will not be a child of
4971 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4972 under it. The parameters can then be evaluated in the debugger,
4973 but don't show up in backtraces. */
4974 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4975 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4977 tree v = *var;
4978 *var = TREE_CHAIN (v);
4979 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4980 BLOCK_VARS (id->block) = v;
4982 else
4983 var = &TREE_CHAIN (*var);
4985 else
4986 remap_blocks_to_null (DECL_INITIAL (fn), id);
4989 /* Return statements in the function body will be replaced by jumps
4990 to the RET_LABEL. */
4991 gcc_assert (DECL_INITIAL (fn));
4992 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4994 /* Find the LHS to which the result of this call is assigned. */
4995 return_slot = NULL;
4996 if (gimple_call_lhs (stmt))
4998 modify_dest = gimple_call_lhs (stmt);
5000 /* The function which we are inlining might not return a value,
5001 in which case we should issue a warning that the function
5002 does not return a value. In that case the optimizers will
5003 see that the variable to which the value is assigned was not
5004 initialized. We do not want to issue a warning about that
5005 uninitialized variable. */
5006 if (DECL_P (modify_dest))
5007 TREE_NO_WARNING (modify_dest) = 1;
5009 if (gimple_call_return_slot_opt_p (call_stmt))
5011 return_slot = modify_dest;
5012 modify_dest = NULL;
5015 else
5016 modify_dest = NULL;
5018 /* If we are inlining a call to the C++ operator new, we don't want
5019 to use type based alias analysis on the return value. Otherwise
5020 we may get confused if the compiler sees that the inlined new
5021 function returns a pointer which was just deleted. See bug
5022 33407. */
5023 if (DECL_IS_OPERATOR_NEW_P (fn))
5025 return_slot = NULL;
5026 modify_dest = NULL;
5029 /* Declare the return variable for the function. */
5030 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
5032 /* Add local vars in this inlined callee to caller. */
5033 add_local_variables (id->src_cfun, cfun, id);
5035 info = clone_info::get (id->src_node);
5036 if (info && info->performed_splits)
5038 clone_info *dst_info = clone_info::get_create (id->dst_node);
5039 /* Any calls from the inlined function will be turned into calls from the
5040 function we inline into. We must preserve the notes about how to split
5041 parameters so that such calls can be redirected/updated correctly. */
5042 unsigned len = vec_safe_length (info->performed_splits);
5043 for (unsigned i = 0; i < len; i++)
5045 ipa_param_performed_split ps
5046 = (*info->performed_splits)[i];
5047 ps.dummy_decl = remap_decl (ps.dummy_decl, id);
5048 vec_safe_push (dst_info->performed_splits, ps);
5051 if (flag_checking)
5053 len = vec_safe_length (dst_info->performed_splits);
5054 for (unsigned i = 0; i < len; i++)
5056 ipa_param_performed_split *ps1
5057 = &(*dst_info->performed_splits)[i];
5058 for (unsigned j = i + 1; j < len; j++)
5060 ipa_param_performed_split *ps2
5061 = &(*dst_info->performed_splits)[j];
5062 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
5063 || ps1->unit_offset != ps2->unit_offset);
5069 if (dump_enabled_p ())
5071 char buf[128];
5072 snprintf (buf, sizeof(buf), "%4.2f",
5073 cg_edge->sreal_frequency ().to_double ());
5074 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5075 call_stmt,
5076 "Inlining %C to %C with frequency %s\n",
5077 id->src_node, id->dst_node, buf);
5078 if (dump_file && (dump_flags & TDF_DETAILS))
5080 id->src_node->dump (dump_file);
5081 id->dst_node->dump (dump_file);
5085 /* This is it. Duplicate the callee body. Assume callee is
5086 pre-gimplified. Note that we must not alter the caller
5087 function in any way before this point, as this CALL_EXPR may be
5088 a self-referential call; if we're calling ourselves, we need to
5089 duplicate our body before altering anything. */
5090 copy_body (id, bb, return_block, NULL);
5092 reset_debug_bindings (id, stmt_gsi);
5094 if (flag_stack_reuse != SR_NONE)
5095 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5096 if (!TREE_THIS_VOLATILE (p))
5098 tree *varp = id->decl_map->get (p);
5099 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
5101 tree clobber = build_clobber (TREE_TYPE (*varp));
5102 gimple *clobber_stmt;
5103 clobber_stmt = gimple_build_assign (*varp, clobber);
5104 gimple_set_location (clobber_stmt, gimple_location (stmt));
5105 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5109 /* Reset the escaped solution. */
5110 if (cfun->gimple_df)
5111 pt_solution_reset (&cfun->gimple_df->escaped);
5113 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
5114 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5116 size_t nargs = gimple_call_num_args (simtenter_stmt);
5117 vec<tree> *vars = id->dst_simt_vars;
5118 auto_vec<tree> newargs (nargs + vars->length ());
5119 for (size_t i = 0; i < nargs; i++)
5120 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5121 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5123 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5124 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5126 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5127 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5128 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5129 gsi_replace (&gsi, g, false);
5131 vec_free (id->dst_simt_vars);
5132 id->dst_simt_vars = simtvars_save;
5134 /* Clean up. */
5135 if (id->debug_map)
5137 delete id->debug_map;
5138 id->debug_map = dst;
5140 delete id->decl_map;
5141 id->decl_map = st;
5143 /* Unlink the call's virtual operands before replacing it. */
5144 unlink_stmt_vdef (stmt);
5145 if (gimple_vdef (stmt)
5146 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5147 release_ssa_name (gimple_vdef (stmt));
5149 /* If the inlined function returns a result that we care about,
5150 substitute the GIMPLE_CALL with an assignment of the return
5151 variable to the LHS of the call. That is, if STMT was
5152 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
5153 if (use_retvar && gimple_call_lhs (stmt))
5155 gimple *old_stmt = stmt;
5156 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5157 gimple_set_location (stmt, gimple_location (old_stmt));
5158 gsi_replace (&stmt_gsi, stmt, false);
5159 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5160 /* Append a clobber for id->retvar if easily possible. */
5161 if (flag_stack_reuse != SR_NONE
5162 && id->retvar
5163 && VAR_P (id->retvar)
5164 && id->retvar != return_slot
5165 && id->retvar != modify_dest
5166 && !TREE_THIS_VOLATILE (id->retvar)
5167 && !is_gimple_reg (id->retvar)
5168 && !stmt_ends_bb_p (stmt))
5170 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5171 gimple *clobber_stmt;
5172 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5173 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5174 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5177 else
5179 /* Handle the case of inlining a function with no return
5180 statement, which causes the return value to become undefined. */
5181 if (gimple_call_lhs (stmt)
5182 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5184 tree name = gimple_call_lhs (stmt);
5185 tree var = SSA_NAME_VAR (name);
5186 tree def = var ? ssa_default_def (cfun, var) : NULL;
5188 if (def)
5190 /* If the variable is used undefined, make this name
5191 undefined via a move. */
5192 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5193 gsi_replace (&stmt_gsi, stmt, true);
5195 else
5197 if (!var)
5199 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5200 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5202 /* Otherwise make this variable undefined. */
5203 gsi_remove (&stmt_gsi, true);
5204 set_ssa_default_def (cfun, var, name);
5205 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5208 /* Replace with a clobber for id->retvar. */
5209 else if (flag_stack_reuse != SR_NONE
5210 && id->retvar
5211 && VAR_P (id->retvar)
5212 && id->retvar != return_slot
5213 && id->retvar != modify_dest
5214 && !TREE_THIS_VOLATILE (id->retvar)
5215 && !is_gimple_reg (id->retvar))
5217 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5218 gimple *clobber_stmt;
5219 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5220 gimple_set_location (clobber_stmt, gimple_location (stmt));
5221 gsi_replace (&stmt_gsi, clobber_stmt, false);
5222 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5224 else
5225 gsi_remove (&stmt_gsi, true);
5228 if (purge_dead_abnormal_edges)
5229 bitmap_set_bit (to_purge, return_block->index);
5231 /* If the value of the new expression is ignored, that's OK. We
5232 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5233 the equivalent inlined version either. */
5234 if (is_gimple_assign (stmt))
5236 gcc_assert (gimple_assign_single_p (stmt)
5237 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5238 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5241 id->add_clobbers_to_eh_landing_pads = 0;
5243 /* Output the inlining info for this abstract function, since it has been
5244 inlined. If we don't do this now, we can lose the information about the
5245 variables in the function when the blocks get blown away as soon as we
5246 remove the cgraph node. */
5247 if (gimple_block (stmt))
5248 (*debug_hooks->outlining_inline_function) (fn);
5250 /* Update callgraph if needed. */
5251 cg_edge->callee->remove ();
5253 id->block = NULL_TREE;
5254 id->retvar = NULL_TREE;
5255 successfully_inlined = true;
5257 egress:
5258 input_location = saved_location;
5259 return successfully_inlined;
5262 /* Expand call statements reachable from STMT_P.
5263 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5264 in a MODIFY_EXPR. */
5266 static bool
5267 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5268 bitmap to_purge)
5270 gimple_stmt_iterator gsi;
5271 bool inlined = false;
5273 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5275 gimple *stmt = gsi_stmt (gsi);
5276 gsi_prev (&gsi);
5278 if (is_gimple_call (stmt)
5279 && !gimple_call_internal_p (stmt))
5280 inlined |= expand_call_inline (bb, stmt, id, to_purge);
5283 return inlined;
5287 /* Walk all basic blocks created after FIRST and try to fold every statement
5288 in the STATEMENTS pointer set. */
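/* For example (illustrative): if foo (5) was inlined and its body
   contained  if (n_1 > 0),  the copied condition may become
   if (5 > 0) after parameter setup; folding it here lets the
   following CFG cleanup remove the dead branch.  */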
5290 static void
5291 fold_marked_statements (int first, hash_set<gimple *> *statements)
5293 auto_bitmap to_purge;
5295 auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
5296 auto_sbitmap visited (last_basic_block_for_fn (cfun));
5297 bitmap_clear (visited);
5299 stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5300 while (!stack.is_empty ())
5302 /* Look at the edge on the top of the stack. */
5303 edge e = stack.pop ();
5304 basic_block dest = e->dest;
5306 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
5307 || bitmap_bit_p (visited, dest->index))
5308 continue;
5310 bitmap_set_bit (visited, dest->index);
5312 if (dest->index >= first)
5313 for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
5314 !gsi_end_p (gsi); gsi_next (&gsi))
5316 if (!statements->contains (gsi_stmt (gsi)))
5317 continue;
5319 gimple *old_stmt = gsi_stmt (gsi);
5320 tree old_decl = (is_gimple_call (old_stmt)
5321 ? gimple_call_fndecl (old_stmt) : 0);
5322 if (old_decl && fndecl_built_in_p (old_decl))
5324 /* Folding builtins can create multiple instructions,
5325 we need to look at all of them. */
5326 gimple_stmt_iterator i2 = gsi;
5327 gsi_prev (&i2);
5328 if (fold_stmt (&gsi))
5330 gimple *new_stmt;
5331 /* If a builtin at the end of a bb folded into nothing,
5332 the following loop won't work. */
5333 if (gsi_end_p (gsi))
5335 cgraph_update_edges_for_call_stmt (old_stmt,
5336 old_decl, NULL);
5337 break;
5339 if (gsi_end_p (i2))
5340 i2 = gsi_start_bb (dest);
5341 else
5342 gsi_next (&i2);
5343 while (1)
5345 new_stmt = gsi_stmt (i2);
5346 update_stmt (new_stmt);
5347 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5348 new_stmt);
5350 if (new_stmt == gsi_stmt (gsi))
5352 /* It is okay to check only for the very last
5353 of these statements. If it is a throwing
5354 statement nothing will change. If it isn't,
5355 this can remove EH edges. The only way this
5356 could be wrong is if some intermediate stmts
5357 throw but the last one doesn't; that would mean
5358 we'd have to split the block, which we can't
5359 do here and we'd lose anyway. And as builtins
5360 probably never throw, this is all
5361 moot anyway. */
5362 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5363 new_stmt))
5364 bitmap_set_bit (to_purge, dest->index);
5365 break;
5367 gsi_next (&i2);
5371 else if (fold_stmt (&gsi))
5373 /* Re-read the statement from GSI as fold_stmt() may
5374 have changed it. */
5375 gimple *new_stmt = gsi_stmt (gsi);
5376 update_stmt (new_stmt);
5378 if (is_gimple_call (old_stmt)
5379 || is_gimple_call (new_stmt))
5380 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5381 new_stmt);
5383 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5384 bitmap_set_bit (to_purge, dest->index);
5388 if (EDGE_COUNT (dest->succs) > 0)
5390 /* Avoid warnings emitted from folding statements that
5391 became unreachable because of inlined function parameter
5392 propagation. */
5393 e = find_taken_edge (dest, NULL_TREE);
5394 if (e)
5395 stack.quick_push (e);
5396 else
5398 edge_iterator ei;
5399 FOR_EACH_EDGE (e, ei, dest->succs)
5400 stack.safe_push (e);
5405 gimple_purge_all_dead_eh_edges (to_purge);
5408 /* Expand calls to inline functions in the body of FN. */
5410 unsigned int
5411 optimize_inline_calls (tree fn)
5413 copy_body_data id;
5414 basic_block bb;
5415 int last = n_basic_blocks_for_fn (cfun);
5416 bool inlined_p = false;
5418 /* Clear out ID. */
5419 memset (&id, 0, sizeof (id));
5421 id.src_node = id.dst_node = cgraph_node::get (fn);
5422 gcc_assert (id.dst_node->definition);
5423 id.dst_fn = fn;
5424 /* Or any functions that aren't finished yet. */
5425 if (current_function_decl)
5426 id.dst_fn = current_function_decl;
5428 id.copy_decl = copy_decl_maybe_to_var;
5429 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5430 id.transform_new_cfg = false;
5431 id.transform_return_to_modify = true;
5432 id.transform_parameter = true;
5433 id.transform_lang_insert_block = NULL;
5434 id.statements_to_fold = new hash_set<gimple *>;
5436 push_gimplify_context ();
5438 /* We make no attempts to keep dominance info up-to-date. */
5439 free_dominance_info (CDI_DOMINATORS);
5440 free_dominance_info (CDI_POST_DOMINATORS);
5442 /* Register specific gimple functions. */
5443 gimple_register_cfg_hooks ();
5445 /* Reach the trees by walking over the CFG, and note the
5446 enclosing basic-blocks in the call edges. */
5447 /* We walk the blocks going forward, because inlined function bodies
5448 will split id->current_basic_block, and the new blocks will
5449 follow it; we'll trudge through them, processing their CALL_EXPRs
5450 along the way. */
5451 auto_bitmap to_purge;
5452 FOR_EACH_BB_FN (bb, cfun)
5453 inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5455 pop_gimplify_context (NULL);
5457 if (flag_checking)
5459 struct cgraph_edge *e;
5461 id.dst_node->verify ();
5463 /* Double check that we inlined everything we are supposed to inline. */
5464 for (e = id.dst_node->callees; e; e = e->next_callee)
5465 gcc_assert (e->inline_failed);
5468 /* If we didn't inline into the function there is nothing to do. */
5469 if (!inlined_p)
5471 delete id.statements_to_fold;
5472 return 0;
5475 /* Fold queued statements. */
5476 update_max_bb_count ();
5477 fold_marked_statements (last, id.statements_to_fold);
5478 delete id.statements_to_fold;
5480 /* Finally purge EH and abnormal edges from the call stmts we inlined.
5481 We need to do this after fold_marked_statements since that may walk
5482 the SSA use-def chain. */
5483 unsigned i;
5484 bitmap_iterator bi;
5485 EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5487 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5488 if (bb)
5490 gimple_purge_dead_eh_edges (bb);
5491 gimple_purge_dead_abnormal_call_edges (bb);
5495 gcc_assert (!id.debug_stmts.exists ());
5497 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5498 number_blocks (fn);
5500 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5501 id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();
5503 if (flag_checking)
5504 id.dst_node->verify ();
5506 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5507 not possible yet - the IPA passes might make various functions not
5508 throw, and they don't care to proactively update local EH info. This is
5509 done later in the fixup_cfg pass, which also executes the verification. */
5510 return (TODO_update_ssa
5511 | TODO_cleanup_cfg
5512 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5513 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5514 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5515 ? TODO_rebuild_frequencies : 0));
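/* Illustrative sketch (not part of the original sources): a caller such as
   the inliner's per-function transform hook is expected to run this on the
   current function and forward the returned TODO flags to the pass manager:

     unsigned int
     my_inline_transform (void)            // hypothetical wrapper
     {
       unsigned int todo = 0;
       todo |= optimize_inline_calls (current_function_decl);
       return todo;                        // pass manager performs the TODOs
     }
*/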
5518 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5520 tree
5521 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5523 enum tree_code code = TREE_CODE (*tp);
5524 enum tree_code_class cl = TREE_CODE_CLASS (code);
5526 /* We make copies of most nodes. */
5527 if (IS_EXPR_CODE_CLASS (cl)
5528 || code == TREE_LIST
5529 || code == TREE_VEC
5530 || code == TYPE_DECL
5531 || code == OMP_CLAUSE)
5533 /* Because the chain gets clobbered when we make a copy, we save it
5534 here. */
5535 tree chain = NULL_TREE, new_tree;
5537 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5538 chain = TREE_CHAIN (*tp);
5540 /* Copy the node. */
5541 new_tree = copy_node (*tp);
5543 *tp = new_tree;
5545 /* Now, restore the chain, if appropriate. That will cause
5546 walk_tree to walk into the chain as well. */
5547 if (code == PARM_DECL
5548 || code == TREE_LIST
5549 || code == OMP_CLAUSE)
5550 TREE_CHAIN (*tp) = chain;
5552 /* For now, we don't update BLOCKs when we make copies. So, we
5553 have to nullify all BIND_EXPRs. */
5554 if (TREE_CODE (*tp) == BIND_EXPR)
5555 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5557 else if (code == CONSTRUCTOR)
5559 /* CONSTRUCTOR nodes need special handling because
5560 we need to duplicate the vector of elements. */
5561 tree new_tree;
5563 new_tree = copy_node (*tp);
5564 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5565 *tp = new_tree;
5567 else if (code == STATEMENT_LIST)
5568 /* We used to just abort on STATEMENT_LIST, but we can run into them
5569 with statement-expressions (c++/40975). */
5570 copy_statement_list (tp);
5571 else if (TREE_CODE_CLASS (code) == tcc_type)
5572 *walk_subtrees = 0;
5573 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5574 *walk_subtrees = 0;
5575 else if (TREE_CODE_CLASS (code) == tcc_constant)
5576 *walk_subtrees = 0;
5577 return NULL_TREE;
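/* Illustrative use of the callback above (a sketch, not from this file):
   deep-copying a GENERIC expression by handing copy_tree_r to walk_tree:

     tree expr_copy = shared_expr;                  // 'shared_expr' is hypothetical
     walk_tree (&expr_copy, copy_tree_r, NULL, NULL);

   Declarations, types and constants remain shared rather than copied,
   because copy_tree_r clears *WALK_SUBTREES for those tree classes.  */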
5580 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5581 information indicating to what new SAVE_EXPR this one should be mapped,
5582 use that one. Otherwise, create a new node and enter it in ST. */
5585 static void
5586 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5588 tree *n;
5589 tree t;
5591 /* See if we already encountered this SAVE_EXPR. */
5592 n = st->get (*tp);
5594 /* If we didn't already remap this SAVE_EXPR, do so now. */
5595 if (!n)
5597 t = copy_node (*tp);
5599 /* Remember this SAVE_EXPR. */
5600 st->put (*tp, t);
5601 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5602 st->put (t, t);
5604 else
5606 /* We've already walked into this SAVE_EXPR; don't do it again. */
5607 *walk_subtrees = 0;
5608 t = *n;
5611 /* Replace this SAVE_EXPR with the copy. */
5612 *tp = t;
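/* A sketch of the mapping built above (hypothetical names): after the first
   visit of a SAVE_EXPR 'orig' the table contains

     st->get (orig) -> copy      later references are redirected to the copy
     st->get (copy) -> copy      revisiting the copy is a no-op

   which is what keeps an already-remapped SAVE_EXPR from being copied a
   second time.  */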
5615 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5616 label, copies the declaration and enters it in the decl map of DATA (which
5617 is really a 'copy_body_data *'). */
5619 static tree
5620 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5621 bool *handled_ops_p ATTRIBUTE_UNUSED,
5622 struct walk_stmt_info *wi)
5624 copy_body_data *id = (copy_body_data *) wi->info;
5625 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5627 if (stmt)
5629 tree decl = gimple_label_label (stmt);
5631 /* Copy the decl and remember the copy. */
5632 insert_decl_map (id, decl, id->copy_decl (decl, id));
5635 return NULL_TREE;
5638 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5639 struct walk_stmt_info *wi);
5641 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5642 Using the decl map of the copy_body_data passed in WI->info,
5643 remaps all local declarations to appropriate replacements in gimple
5644 operands. */
5646 static tree
5647 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5649 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5650 copy_body_data *id = (copy_body_data *) wi->info;
5651 hash_map<tree, tree> *st = id->decl_map;
5652 tree *n;
5653 tree expr = *tp;
5655 /* For recursive invocations this is no longer the LHS itself. */
5656 bool is_lhs = wi->is_lhs;
5657 wi->is_lhs = false;
5659 if (TREE_CODE (expr) == SSA_NAME)
5661 *tp = remap_ssa_name (*tp, id);
5662 *walk_subtrees = 0;
5663 if (is_lhs)
5664 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5666 /* Only a local declaration (variable or label). */
5667 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5668 || TREE_CODE (expr) == LABEL_DECL)
5670 /* Lookup the declaration. */
5671 n = st->get (expr);
5673 /* If it's there, remap it. */
5674 if (n)
5675 *tp = *n;
5676 *walk_subtrees = 0;
5678 else if (TREE_CODE (expr) == STATEMENT_LIST
5679 || TREE_CODE (expr) == BIND_EXPR
5680 || TREE_CODE (expr) == SAVE_EXPR)
5681 gcc_unreachable ();
5682 else if (TREE_CODE (expr) == TARGET_EXPR)
5684 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5685 It's OK for this to happen if it was part of a subtree that
5686 isn't immediately expanded, such as operand 2 of another
5687 TARGET_EXPR. */
5688 if (!TREE_OPERAND (expr, 1))
5690 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5691 TREE_OPERAND (expr, 3) = NULL_TREE;
5694 else if (TREE_CODE (expr) == OMP_CLAUSE)
5696 /* Before the omplower pass completes, some OMP clauses can contain
5697 sequences that are neither copied by gimple_seq_copy nor walked by
5698 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5699 in those situations, we have to copy and process them explicitly. */
5701 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5703 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5704 seq = duplicate_remap_omp_clause_seq (seq, wi);
5705 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5707 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5709 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5710 seq = duplicate_remap_omp_clause_seq (seq, wi);
5711 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5713 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5715 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5716 seq = duplicate_remap_omp_clause_seq (seq, wi);
5717 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5718 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5719 seq = duplicate_remap_omp_clause_seq (seq, wi);
5720 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5724 /* Keep iterating. */
5725 return NULL_TREE;
5729 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5730 Using the decl map of the copy_body_data passed in WI->info,
5731 remaps all local declarations to appropriate replacements in gimple
5732 statements. */
5734 static tree
5735 replace_locals_stmt (gimple_stmt_iterator *gsip,
5736 bool *handled_ops_p ATTRIBUTE_UNUSED,
5737 struct walk_stmt_info *wi)
5739 copy_body_data *id = (copy_body_data *) wi->info;
5740 gimple *gs = gsi_stmt (*gsip);
5742 if (gbind *stmt = dyn_cast <gbind *> (gs))
5744 tree block = gimple_bind_block (stmt);
5746 if (block)
5748 remap_block (&block, id);
5749 gimple_bind_set_block (stmt, block);
5752 /* This will remap a lot of the same decls again, but this should be
5753 harmless. */
5754 if (gimple_bind_vars (stmt))
5756 tree old_var, decls = gimple_bind_vars (stmt);
5758 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5759 if (!can_be_nonlocal (old_var, id)
5760 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5761 remap_decl (old_var, id);
5763 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5764 id->prevent_decl_creation_for_types = true;
5765 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5766 id->prevent_decl_creation_for_types = false;
5770 /* Keep iterating. */
5771 return NULL_TREE;
5774 /* Create a copy of SEQ and remap all decls in it. */
5776 static gimple_seq
5777 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5779 if (!seq)
5780 return NULL;
5782 /* If there are any labels in OMP sequences, they can only be referred to from
5783 within the sequence itself, so both label marking and local remapping can be done here. */
5784 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5785 gimple_seq copy = gimple_seq_copy (seq);
5786 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5787 return copy;
5790 /* Copies everything in SEQ and replaces variables and labels local to
5791 current_function_decl. */
5793 gimple_seq
5794 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5796 copy_body_data id;
5797 struct walk_stmt_info wi;
5798 gimple_seq copy;
5800 /* There's nothing to do for NULL_TREE. */
5801 if (seq == NULL)
5802 return seq;
5804 /* Set up ID. */
5805 memset (&id, 0, sizeof (id));
5806 id.src_fn = current_function_decl;
5807 id.dst_fn = current_function_decl;
5808 id.src_cfun = cfun;
5809 id.decl_map = new hash_map<tree, tree>;
5810 id.debug_map = NULL;
5812 id.copy_decl = copy_decl_no_change;
5813 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5814 id.transform_new_cfg = false;
5815 id.transform_return_to_modify = false;
5816 id.transform_parameter = false;
5817 id.transform_lang_insert_block = NULL;
5819 /* Walk the tree once to find local labels. */
5820 memset (&wi, 0, sizeof (wi));
5821 hash_set<tree> visited;
5822 wi.info = &id;
5823 wi.pset = &visited;
5824 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5826 copy = gimple_seq_copy (seq);
5828 /* Walk the copy, remapping decls. */
5829 memset (&wi, 0, sizeof (wi));
5830 wi.info = &id;
5831 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5833 /* Clean up. */
5834 delete id.decl_map;
5835 if (id.debug_map)
5836 delete id.debug_map;
5837 if (id.dependence_map)
5839 delete id.dependence_map;
5840 id.dependence_map = NULL;
5843 return copy;
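/* Illustrative sketch of a caller (assumed, not from this file): duplicating
   the body of a GIMPLE_BIND while keeping its automatic variables and labels
   private to the copy:

     gbind *bind = ...;                                // some bind statement
     gimple_seq dup
       = copy_gimple_seq_and_replace_locals (gimple_bind_body (bind));

   Every non-static local VAR_DECL and every LABEL_DECL referenced in the
   original sequence is replaced by a fresh copy in DUP; statics are shared.  */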
5847 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5849 static tree
5850 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5852 if (*tp == data)
5853 return (tree) data;
5854 else
5855 return NULL;
5858 DEBUG_FUNCTION bool
5859 debug_find_tree (tree top, tree search)
5861 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
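/* Typical interactive use from the debugger (illustrative; the tree names
   are hypothetical):

     (gdb) call debug_find_tree (body, suspect)
     $1 = true

   i.e. SUSPECT is reachable somewhere below BODY.  */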
5865 /* Declare the variables created by the inliner. Add all the variables in
5866 VARS to BLOCK. */
5868 static void
5869 declare_inline_vars (tree block, tree vars)
5871 tree t;
5872 for (t = vars; t; t = DECL_CHAIN (t))
5874 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5875 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5876 add_local_decl (cfun, t);
5879 if (block)
5880 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5883 /* Finish the copy of DECL into COPY. The DECL originally was in ID->src_fn,
5884 but the copy will live in ID->dst_fn; fix up its debug information,
5885 context and attributes accordingly. */
5887 tree
5888 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5890 /* Don't generate debug information for the copy if we wouldn't have
5891 generated it for the original either. */
5892 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5893 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5895 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5896 declaration inspired this copy. */
5897 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5899 /* The new variable/label has no RTL, yet. */
5900 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5901 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5902 SET_DECL_RTL (copy, 0);
5903 /* For vector typed decls make sure to update DECL_MODE according
5904 to the new function context. */
5905 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5906 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5908 /* These args would always appear unused, if not for this. */
5909 TREE_USED (copy) = 1;
5911 /* Set the context for the new declaration. */
5912 if (!DECL_CONTEXT (decl))
5913 /* Globals stay global. */
5915 else if (DECL_CONTEXT (decl) != id->src_fn)
5916 /* Things that weren't in the scope of the function we're inlining
5917 from aren't in the scope we're inlining to, either. */
5919 else if (TREE_STATIC (decl))
5920 /* Function-scoped static variables should stay in the original
5921 function. */
5923 else
5925 /* Ordinary automatic local variables are now in the scope of the
5926 new function. */
5927 DECL_CONTEXT (copy) = id->dst_fn;
5928 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5930 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5931 DECL_ATTRIBUTES (copy)
5932 = tree_cons (get_identifier ("omp simt private"), NULL,
5933 DECL_ATTRIBUTES (copy));
5934 id->dst_simt_vars->safe_push (copy);
5938 return copy;
5941 /* Create a new VAR_DECL that is identical in all respects to DECL except
5942 for its tree code; DECL must be either a PARM_DECL or a RESULT_DECL. The original
5943 DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
5945 tree
5946 copy_decl_to_var (tree decl, copy_body_data *id)
5948 tree copy, type;
5950 gcc_assert (TREE_CODE (decl) == PARM_DECL
5951 || TREE_CODE (decl) == RESULT_DECL);
5953 type = TREE_TYPE (decl);
5955 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5956 VAR_DECL, DECL_NAME (decl), type);
5957 if (DECL_PT_UID_SET_P (decl))
5958 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5959 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5960 TREE_READONLY (copy) = TREE_READONLY (decl);
5961 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5962 DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
5963 DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
5965 return copy_decl_for_dup_finish (id, decl, copy);
5968 /* Like copy_decl_to_var, but create a return slot object instead of a
5969 pointer variable for return by invisible reference. */
5971 static tree
5972 copy_result_decl_to_var (tree decl, copy_body_data *id)
5974 tree copy, type;
5976 gcc_assert (TREE_CODE (decl) == PARM_DECL
5977 || TREE_CODE (decl) == RESULT_DECL);
5979 type = TREE_TYPE (decl);
5980 if (DECL_BY_REFERENCE (decl))
5981 type = TREE_TYPE (type);
5983 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5984 VAR_DECL, DECL_NAME (decl), type);
5985 if (DECL_PT_UID_SET_P (decl))
5986 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5987 TREE_READONLY (copy) = TREE_READONLY (decl);
5988 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5989 if (!DECL_BY_REFERENCE (decl))
5991 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5992 DECL_NOT_GIMPLE_REG_P (copy)
5993 = (DECL_NOT_GIMPLE_REG_P (decl)
5994 /* RESULT_DECLs are treated special by needs_to_live_in_memory,
5995 mirror that to the created VAR_DECL. */
5996 || (TREE_CODE (decl) == RESULT_DECL
5997 && aggregate_value_p (decl, id->src_fn)));
6000 return copy_decl_for_dup_finish (id, decl, copy);
6003 tree
6004 copy_decl_no_change (tree decl, copy_body_data *id)
6006 tree copy;
6008 copy = copy_node (decl);
6010 /* The COPY is not abstract; it will be generated in DST_FN. */
6011 DECL_ABSTRACT_P (copy) = false;
6012 lang_hooks.dup_lang_specific_decl (copy);
6014 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
6015 been taken; it's for internal bookkeeping in expand_goto_internal. */
6016 if (TREE_CODE (copy) == LABEL_DECL)
6018 TREE_ADDRESSABLE (copy) = 0;
6019 LABEL_DECL_UID (copy) = -1;
6022 return copy_decl_for_dup_finish (id, decl, copy);
6025 static tree
6026 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
6028 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
6029 return copy_decl_to_var (decl, id);
6030 else
6031 return copy_decl_no_change (decl, id);
6034 /* Return a copy of the function's argument tree without any modifications. */
6036 static tree
6037 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
6039 tree arg, *parg;
6040 tree new_parm = NULL;
6042 parg = &new_parm;
6043 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
6045 tree new_tree = remap_decl (arg, id);
6046 if (TREE_CODE (new_tree) != PARM_DECL)
6047 new_tree = id->copy_decl (arg, id);
6048 lang_hooks.dup_lang_specific_decl (new_tree);
6049 *parg = new_tree;
6050 parg = &DECL_CHAIN (new_tree);
6052 return new_parm;
6055 /* Return a copy of the function's static chain. */
6056 static tree
6057 copy_static_chain (tree static_chain, copy_body_data * id)
6059 tree *chain_copy, *pvar;
6061 chain_copy = &static_chain;
6062 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
6064 tree new_tree = remap_decl (*pvar, id);
6065 lang_hooks.dup_lang_specific_decl (new_tree);
6066 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
6067 *pvar = new_tree;
6069 return static_chain;
6072 /* Return true if the function is allowed to be versioned.
6073 This is a guard for the versioning functionality. */
6075 bool
6076 tree_versionable_function_p (tree fndecl)
6078 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
6079 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
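/* For reference, the user-visible side of this guard (illustrative C source,
   not part of GCC itself): a function carrying the "noclone" attribute is
   never versioned or cloned by IPA passes:

     __attribute__ ((noclone)) int
     keep_single_copy (int x)
     {
       return x + 1;
     }
*/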
6082 /* Update clone info after duplication. */
6084 static void
6085 update_clone_info (copy_body_data * id)
6087 clone_info *dst_info = clone_info::get (id->dst_node);
6088 vec<ipa_param_performed_split, va_gc> *cur_performed_splits
6089 = dst_info ? dst_info->performed_splits : NULL;
6090 if (cur_performed_splits)
6092 unsigned len = cur_performed_splits->length ();
6093 for (unsigned i = 0; i < len; i++)
6095 ipa_param_performed_split *ps = &(*cur_performed_splits)[i];
6096 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6100 struct cgraph_node *node;
6101 if (!id->dst_node->clones)
6102 return;
6103 for (node = id->dst_node->clones; node != id->dst_node;)
6105 /* First update replace maps to match the new body. */
6106 clone_info *info = clone_info::get (node);
6107 if (info && info->tree_map)
6109 unsigned int i;
6110 for (i = 0; i < vec_safe_length (info->tree_map); i++)
6112 struct ipa_replace_map *replace_info;
6113 replace_info = (*info->tree_map)[i];
6114 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6117 if (info && info->performed_splits)
6119 unsigned len = vec_safe_length (info->performed_splits);
6120 for (unsigned i = 0; i < len; i++)
6122 ipa_param_performed_split *ps
6123 = &(*info->performed_splits)[i];
6124 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6127 if (unsigned len = vec_safe_length (cur_performed_splits))
6129 /* We do not want to add the currently performed splits when we are saving
6130 a copy of the function body for later use during inlining; that would just
6131 duplicate all entries. So check whether anything
6132 referring to the first dummy_decl is already present. */
6133 if (!info)
6134 info = clone_info::get_create (node);
6135 unsigned dst_len = vec_safe_length (info->performed_splits);
6136 ipa_param_performed_split *first = &(*cur_performed_splits)[0];
6137 for (unsigned i = 0; i < dst_len; i++)
6138 if ((*info->performed_splits)[i].dummy_decl
6139 == first->dummy_decl)
6141 len = 0;
6142 break;
6145 for (unsigned i = 0; i < len; i++)
6146 vec_safe_push (info->performed_splits,
6147 (*cur_performed_splits)[i]);
6148 if (flag_checking)
6150 for (unsigned i = 0; i < dst_len; i++)
6152 ipa_param_performed_split *ps1
6153 = &(*info->performed_splits)[i];
6154 for (unsigned j = i + 1; j < dst_len; j++)
6156 ipa_param_performed_split *ps2
6157 = &(*info->performed_splits)[j];
6158 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
6159 || ps1->unit_offset != ps2->unit_offset);
6165 if (node->clones)
6166 node = node->clones;
6167 else if (node->next_sibling_clone)
6168 node = node->next_sibling_clone;
6169 else
6171 while (node != id->dst_node && !node->next_sibling_clone)
6172 node = node->clone_of;
6173 if (node != id->dst_node)
6174 node = node->next_sibling_clone;
6179 /* Create a copy of a function's tree.
6180 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6181 of the original function and the new copied function
6182 respectively. In case we want to replace a DECL
6183 tree with another tree while duplicating the function's
6184 body, TREE_MAP represents the mapping between these
6185 trees. If UPDATE_CLONES is set, the call_stmt fields
6186 of edges of clones of the function will be updated.
6188 If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype (i.e. the
6189 function parameters and return value) should be modified.
6190 If non-NULL, BLOCKS_TO_COPY determines what basic blocks to copy.
6191 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
6193 void
6194 tree_function_versioning (tree old_decl, tree new_decl,
6195 vec<ipa_replace_map *, va_gc> *tree_map,
6196 ipa_param_adjustments *param_adjustments,
6197 bool update_clones, bitmap blocks_to_copy,
6198 basic_block new_entry)
6200 struct cgraph_node *old_version_node;
6201 struct cgraph_node *new_version_node;
6202 copy_body_data id;
6203 tree p;
6204 unsigned i;
6205 struct ipa_replace_map *replace_info;
6206 basic_block old_entry_block, bb;
6207 auto_vec<gimple *, 10> init_stmts;
6208 tree vars = NULL_TREE;
6210 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6211 && TREE_CODE (new_decl) == FUNCTION_DECL);
6212 DECL_POSSIBLY_INLINED (old_decl) = 1;
6214 old_version_node = cgraph_node::get (old_decl);
6215 gcc_checking_assert (old_version_node);
6216 new_version_node = cgraph_node::get (new_decl);
6217 gcc_checking_assert (new_version_node);
6219 /* Copy over debug args. */
6220 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6222 vec<tree, va_gc> **new_debug_args, **old_debug_args;
6223 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6224 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6225 old_debug_args = decl_debug_args_lookup (old_decl);
6226 if (old_debug_args)
6228 new_debug_args = decl_debug_args_insert (new_decl);
6229 *new_debug_args = vec_safe_copy (*old_debug_args);
6233 /* Output the inlining info for this abstract function, since it has been
6234 inlined. If we don't do this now, we can lose the information about the
6235 variables in the function when the blocks get blown away as soon as we
6236 remove the cgraph node. */
6237 (*debug_hooks->outlining_inline_function) (old_decl);
6239 DECL_ARTIFICIAL (new_decl) = 1;
6240 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6241 if (DECL_ORIGIN (old_decl) == old_decl)
6242 old_version_node->used_as_abstract_origin = true;
6243 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6245 /* Prepare the data structures for the tree copy. */
6246 memset (&id, 0, sizeof (id));
6248 /* Prepare the set of statements to fold for the new version. */
6249 id.statements_to_fold = new hash_set<gimple *>;
6251 id.decl_map = new hash_map<tree, tree>;
6252 id.debug_map = NULL;
6253 id.src_fn = old_decl;
6254 id.dst_fn = new_decl;
6255 id.src_node = old_version_node;
6256 id.dst_node = new_version_node;
6257 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6258 id.blocks_to_copy = blocks_to_copy;
6260 id.copy_decl = copy_decl_no_change;
6261 id.transform_call_graph_edges
6262 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6263 id.transform_new_cfg = true;
6264 id.transform_return_to_modify = false;
6265 id.transform_parameter = false;
6266 id.transform_lang_insert_block = NULL;
6268 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
6269 (DECL_STRUCT_FUNCTION (old_decl));
6270 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6271 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6272 initialize_cfun (new_decl, old_decl,
6273 new_entry ? new_entry->count : old_entry_block->count);
6274 new_version_node->calls_declare_variant_alt
6275 = old_version_node->calls_declare_variant_alt;
6276 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6277 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6278 = id.src_cfun->gimple_df->ipa_pta;
6280 /* Copy the function's static chain. */
6281 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6282 if (p)
6283 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6284 = copy_static_chain (p, &id);
6286 auto_vec<int, 16> new_param_indices;
6287 clone_info *info = clone_info::get (old_version_node);
6288 ipa_param_adjustments *old_param_adjustments
6289 = info ? info->param_adjustments : NULL;
6290 if (old_param_adjustments)
6291 old_param_adjustments->get_updated_indices (&new_param_indices);
6293 /* If there's a tree_map, prepare for substitution. */
6294 if (tree_map)
6295 for (i = 0; i < tree_map->length (); i++)
6297 gimple *init;
6298 replace_info = (*tree_map)[i];
6300 int p = replace_info->parm_num;
6301 if (old_param_adjustments)
6302 p = new_param_indices[p];
6304 tree parm;
6305 for (parm = DECL_ARGUMENTS (old_decl); p;
6306 parm = DECL_CHAIN (parm))
6307 p--;
6308 gcc_assert (parm);
6309 init = setup_one_parameter (&id, parm, replace_info->new_tree,
6310 id.src_fn, NULL, &vars);
6311 if (init)
6312 init_stmts.safe_push (init);
6315 ipa_param_body_adjustments *param_body_adjs = NULL;
6316 if (param_adjustments)
6318 param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6319 new_decl, old_decl,
6320 &id, &vars, tree_map);
6321 id.param_body_adjs = param_body_adjs;
6322 DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6324 else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6325 DECL_ARGUMENTS (new_decl)
6326 = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6328 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6329 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6331 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6333 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6334 /* Add local vars. */
6335 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6337 if (DECL_RESULT (old_decl) == NULL_TREE)
6339 else if (param_adjustments && param_adjustments->m_skip_return
6340 && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6342 tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6343 &id);
6344 declare_inline_vars (NULL, resdecl_repl);
6345 insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6347 DECL_RESULT (new_decl)
6348 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6349 RESULT_DECL, NULL_TREE, void_type_node);
6350 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6351 DECL_IS_MALLOC (new_decl) = false;
6352 cfun->returns_struct = 0;
6353 cfun->returns_pcc_struct = 0;
6355 else
6357 tree old_name;
6358 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6359 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6360 if (gimple_in_ssa_p (id.src_cfun)
6361 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6362 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6364 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6365 insert_decl_map (&id, old_name, new_name);
6366 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6367 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6371 /* Set up the destination function's loop tree. */
6372 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6374 cfun->curr_properties &= ~PROP_loops;
6375 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6376 cfun->curr_properties |= PROP_loops;
6379 /* Copy the Function's body. */
6380 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6381 new_entry);
6383 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6384 number_blocks (new_decl);
6386 /* We want to create the BB unconditionally, so that the addition of
6387 debug stmts doesn't affect BB count, which may in the end cause
6388 codegen differences. */
6389 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6390 while (init_stmts.length ())
6391 insert_init_stmt (&id, bb, init_stmts.pop ());
6392 update_clone_info (&id);
6394 /* Remap the nonlocal_goto_save_area, if any. */
6395 if (cfun->nonlocal_goto_save_area)
6397 struct walk_stmt_info wi;
6399 memset (&wi, 0, sizeof (wi));
6400 wi.info = &id;
6401 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6404 /* Clean up. */
6405 delete id.decl_map;
6406 if (id.debug_map)
6407 delete id.debug_map;
6408 free_dominance_info (CDI_DOMINATORS);
6409 free_dominance_info (CDI_POST_DOMINATORS);
6411 update_max_bb_count ();
6412 fold_marked_statements (0, id.statements_to_fold);
6413 delete id.statements_to_fold;
6414 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6415 if (id.dst_node->definition)
6416 cgraph_edge::rebuild_references ();
6417 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6419 calculate_dominance_info (CDI_DOMINATORS);
6420 fix_loop_structure (NULL);
6422 update_ssa (TODO_update_ssa);
6424 /* After partial cloning we need to rescale frequencies, so they are
6425 within proper range in the cloned function. */
6426 if (new_entry)
6428 struct cgraph_edge *e;
6429 rebuild_frequencies ();
6431 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6432 for (e = new_version_node->callees; e; e = e->next_callee)
6434 basic_block bb = gimple_bb (e->call_stmt);
6435 e->count = bb->count;
6437 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6439 basic_block bb = gimple_bb (e->call_stmt);
6440 e->count = bb->count;
6444 if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6446 vec<tree, va_gc> **debug_args = NULL;
6447 unsigned int len = 0;
6448 unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6450 for (i = 0; i < reset_len; i++)
6452 tree parm = param_body_adjs->m_reset_debug_decls[i];
6453 gcc_assert (is_gimple_reg (parm));
6454 tree ddecl;
6456 if (debug_args == NULL)
6458 debug_args = decl_debug_args_insert (new_decl);
6459 len = vec_safe_length (*debug_args);
6461 ddecl = make_node (DEBUG_EXPR_DECL);
6462 DECL_ARTIFICIAL (ddecl) = 1;
6463 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6464 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6465 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6466 vec_safe_push (*debug_args, ddecl);
6468 if (debug_args != NULL)
6470 /* On the callee side, add
6471 DEBUG D#Y s=> parm
6472 DEBUG var => D#Y
6473 stmts to the first bb where var is a VAR_DECL created for the
6474 optimized away parameter in DECL_INITIAL block. This hints
6475 in the debug info that var (whose DECL_ORIGIN is the parm
6476 PARM_DECL) is optimized away, but could be looked up at the
6477 call site as value of D#X there. */
6478 tree vexpr;
6479 gimple_stmt_iterator cgsi
6480 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6481 gimple *def_temp;
6482 tree var = vars;
6483 i = vec_safe_length (*debug_args);
6486 i -= 2;
6487 while (var != NULL_TREE
6488 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6489 var = TREE_CHAIN (var);
6490 if (var == NULL_TREE)
6491 break;
6492 vexpr = make_node (DEBUG_EXPR_DECL);
6493 tree parm = (**debug_args)[i];
6494 DECL_ARTIFICIAL (vexpr) = 1;
6495 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6496 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6497 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6498 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6499 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6500 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6502 while (i > len);
6505 delete param_body_adjs;
6506 free_dominance_info (CDI_DOMINATORS);
6507 free_dominance_info (CDI_POST_DOMINATORS);
6509 gcc_assert (!id.debug_stmts.exists ());
6510 pop_cfun ();
6511 return;
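/* Illustrative sketch of a caller (assumed, not from this file): a clone
   materializer pairs the original FUNCTION_DECL with a fresh one and a
   TREE_MAP of per-parameter replacements, then lets this function build the
   new body:

     tree_function_versioning (old_decl, new_decl, tree_map,
                               /*param_adjustments=*/NULL,
                               /*update_clones=*/true,
                               /*blocks_to_copy=*/NULL,
                               /*new_entry=*/NULL);
*/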
6514 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6515 the callee and return the inlined body on success. */
6517 tree
6518 maybe_inline_call_in_expr (tree exp)
6520 tree fn = get_callee_fndecl (exp);
6522 /* We can only try to inline "const" functions. */
6523 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6525 call_expr_arg_iterator iter;
6526 copy_body_data id;
6527 tree param, arg, t;
6528 hash_map<tree, tree> decl_map;
6530 /* Remap the parameters. */
6531 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6532 param;
6533 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6534 decl_map.put (param, arg);
6536 memset (&id, 0, sizeof (id));
6537 id.src_fn = fn;
6538 id.dst_fn = current_function_decl;
6539 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6540 id.decl_map = &decl_map;
6542 id.copy_decl = copy_decl_no_change;
6543 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6544 id.transform_new_cfg = false;
6545 id.transform_return_to_modify = true;
6546 id.transform_parameter = true;
6547 id.transform_lang_insert_block = NULL;
6549 /* Make sure not to unshare trees behind the front-end's back
6550 since front-end specific mechanisms may rely on sharing. */
6551 id.regimplify = false;
6552 id.do_not_unshare = true;
6554 /* We're not inside any EH region. */
6555 id.eh_lp_nr = 0;
6557 t = copy_tree_body (&id);
6559 /* We can only return something suitable for use in a GENERIC
6560 expression tree. */
6561 if (TREE_CODE (t) == MODIFY_EXPR)
6562 return TREE_OPERAND (t, 1);
6565 return NULL_TREE;
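/* Illustrative example of the kind of call handled above (a sketch): for a
   "const" callee whose body has been saved,

     __attribute__ ((const)) static int square (int x) { return x * x; }
     ... square (3) ...

   copy_tree_body yields a MODIFY_EXPR along the lines of '<retval> = 3 * 3',
   and the right-hand side of that assignment is what gets returned here.  */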
6568 /* Duplicate a type, fields and all. */
6570 tree
6571 build_duplicate_type (tree type)
6573 struct copy_body_data id;
6575 memset (&id, 0, sizeof (id));
6576 id.src_fn = current_function_decl;
6577 id.dst_fn = current_function_decl;
6578 id.src_cfun = cfun;
6579 id.decl_map = new hash_map<tree, tree>;
6580 id.debug_map = NULL;
6581 id.copy_decl = copy_decl_no_change;
6583 type = remap_type_1 (type, &id);
6585 delete id.decl_map;
6586 if (id.debug_map)
6587 delete id.debug_map;
6589 TYPE_CANONICAL (type) = type;
6591 return type;
6594 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6595 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6596 evaluation. */
6598 tree
6599 copy_fn (tree fn, tree& parms, tree& result)
6601 copy_body_data id;
6602 tree param;
6603 hash_map<tree, tree> decl_map;
6605 tree *p = &parms;
6606 *p = NULL_TREE;
6608 memset (&id, 0, sizeof (id));
6609 id.src_fn = fn;
6610 id.dst_fn = current_function_decl;
6611 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6612 id.decl_map = &decl_map;
6614 id.copy_decl = copy_decl_no_change;
6615 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6616 id.transform_new_cfg = false;
6617 id.transform_return_to_modify = false;
6618 id.transform_parameter = true;
6619 id.transform_lang_insert_block = NULL;
6621 /* Make sure not to unshare trees behind the front-end's back
6622 since front-end specific mechanisms may rely on sharing. */
6623 id.regimplify = false;
6624 id.do_not_unshare = true;
6625 id.do_not_fold = true;
6627 /* We're not inside any EH region. */
6628 id.eh_lp_nr = 0;
6630 /* Remap the parameters and result and return them to the caller. */
6631 for (param = DECL_ARGUMENTS (fn);
6632 param;
6633 param = DECL_CHAIN (param))
6635 *p = remap_decl (param, &id);
6636 p = &DECL_CHAIN (*p);
6639 if (DECL_RESULT (fn))
6640 result = remap_decl (DECL_RESULT (fn), &id);
6641 else
6642 result = NULL_TREE;
6644 return copy_tree_body (&id);
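/* Illustrative sketch of the intended use (hypothetical caller, per the
   comment above): a constexpr evaluator takes a private copy of the body so
   that it can bind parameters to constant values without touching the
   original trees:

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);
     // walk 'parms', binding each remapped PARM_DECL to its constant
     // argument, then evaluate 'body'; the value ends up in 'result'.
*/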