gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2020 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "stringpool.h"
60 #include "attribs.h"
61 #include "sreal.h"
62 #include "tree-cfgcleanup.h"
63 #include "tree-ssa-live.h"
64 #include "alloc-pool.h"
65 #include "symbol-summary.h"
66 #include "symtab-thunks.h"
67 #include "symtab-clones.h"
69 /* I'm not really happy about this, but we need to handle gimple and
70    non-gimple trees.  */
72 /* Inlining, Cloning, Versioning, Parallelization
74 Inlining: a function body is duplicated, but the PARM_DECLs are
75 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
76 MODIFY_EXPRs that store to a dedicated returned-value variable.
77 The duplicated eh_region info of the copy will later be appended
78 to the info for the caller; the eh_region info in copied throwing
79 statements and RESX statements are adjusted accordingly.
81 Cloning: (only in C++) We have one body for a con/de/structor, and
82 multiple function decls, each with a unique parameter list.
83 Duplicate the body, using the given splay tree; some parameters
84 will become constants (like 0 or 1).
86 Versioning: a function body is duplicated and the result is a new
87 function rather than into blocks of an existing function as with
88 inlining. Some parameters will become constants.
90 Parallelization: a region of a function is duplicated resulting in
91 a new function. Variables may be replaced with complex expressions
92 to enable shared variable semantics.
94 All of these will simultaneously lookup any callgraph edges. If
95 we're going to inline the duplicated function body, and the given
96 function has some cloned callgraph nodes (one for each place this
97 function will be inlined) those callgraph edges will be duplicated.
98 If we're cloning the body, those callgraph edges will be
99 updated to point into the new body. (Note that the original
100 callgraph node and edge list will not be altered.)
102 See the CALL_EXPR handling case in copy_tree_body_r (). */
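/* Rough, hypothetical illustration of the inlining transformation described
   above (not code from this file): when a call such as

       y = f (3);        with    int f (int x) { return x + 1; }

   is inlined, the PARM_DECL `x' is remapped to a fresh local initialized
   from the argument, and the RETURN_EXPR becomes an assignment to a
   dedicated return variable, roughly

       x.1 = 3;
       retval.2 = x.1 + 1;
       y = retval.2;

   with the branch semantics of the return handled by the CFG rather than
   by a statement.  The names x.1 and retval.2 are illustrative only.  */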
104 /* To Do:
106 o In order to make inlining-on-trees work, we pessimized
107 function-local static constants. In particular, they are now
108 always output, even when not addressed. Fix this by treating
109 function-local static constants just like global static
110 constants; the back-end already knows not to output them if they
111 are not needed.
113 o Provide heuristics to clamp inlining of recursive template
114 calls? */
117 /* Weights that estimate_num_insns uses to estimate the size of the
118 produced code. */
120 eni_weights eni_size_weights;
122 /* Weights that estimate_num_insns uses to estimate the time necessary
123 to execute the produced code. */
125 eni_weights eni_time_weights;
127 /* Prototypes. */
129 static tree declare_return_variable (copy_body_data *, tree, tree,
130 basic_block);
131 static void remap_block (tree *, copy_body_data *);
132 static void copy_bind_expr (tree *, int *, copy_body_data *);
133 static void declare_inline_vars (tree, tree);
134 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
135 static void prepend_lexical_block (tree current_block, tree new_block);
136 static tree copy_result_decl_to_var (tree, copy_body_data *);
137 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
138 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
139 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
141 /* Insert a tree->tree mapping for ID.  Although the name suggests
142    that the trees should be variables, it is used for more than that.  */
144 void
145 insert_decl_map (copy_body_data *id, tree key, tree value)
147 id->decl_map->put (key, value);
149 /* Always insert an identity map as well. If we see this same new
150 node again, we won't want to duplicate it a second time. */
151 if (key != value)
152 id->decl_map->put (value, value);
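/* Illustrative usage sketch (hypothetical caller, not code from this file):

       tree copy = id->copy_decl (old_var, id);
       insert_decl_map (id, old_var, copy);

   From then on remap_decl (old_var, id) hands back COPY instead of creating
   another one, and the identity mapping added for VALUE above makes a later
   walk over COPY itself a no-op rather than a second duplication.  */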
155 /* Insert a tree->tree mapping for ID. This is only used for
156 variables. */
158 static void
159 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
161 if (!gimple_in_ssa_p (id->src_cfun))
162 return;
164 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
165 return;
167 if (!target_for_debug_bind (key))
168 return;
170 gcc_assert (TREE_CODE (key) == PARM_DECL);
171 gcc_assert (VAR_P (value));
173 if (!id->debug_map)
174 id->debug_map = new hash_map<tree, tree>;
176 id->debug_map->put (key, value);
179 /* If nonzero, we're remapping the contents of inlined debug
180 statements. If negative, an error has occurred, such as a
181 reference to a variable that isn't available in the inlined
182 context. */
183 static int processing_debug_stmt = 0;
185 /* Construct new SSA name for old NAME. ID is the inline context. */
187 static tree
188 remap_ssa_name (tree name, copy_body_data *id)
190 tree new_tree, var;
191 tree *n;
193 gcc_assert (TREE_CODE (name) == SSA_NAME);
195 n = id->decl_map->get (name);
196 if (n)
198 	 /* When we perform edge redirection as part of CFG copy, IPA-SRA can
199 	    remove an unused LHS from a call statement.  Such an LHS can however
200 	    still appear in debug statements, but its value is lost in this
201 	    function and we do not want to map it.  */
202 if (id->killed_new_ssa_names
203 && id->killed_new_ssa_names->contains (*n))
205 gcc_assert (processing_debug_stmt);
206 processing_debug_stmt = -1;
207 return name;
210 return unshare_expr (*n);
213 if (processing_debug_stmt)
215 if (SSA_NAME_IS_DEFAULT_DEF (name)
216 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
217 && id->entry_bb == NULL
218 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
220 tree vexpr = make_node (DEBUG_EXPR_DECL);
221 gimple *def_temp;
222 gimple_stmt_iterator gsi;
223 tree val = SSA_NAME_VAR (name);
225 n = id->decl_map->get (val);
226 if (n != NULL)
227 val = *n;
228 if (TREE_CODE (val) != PARM_DECL
229 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
231 processing_debug_stmt = -1;
232 return name;
234 n = id->decl_map->get (val);
235 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
236 return *n;
237 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
238 DECL_ARTIFICIAL (vexpr) = 1;
239 TREE_TYPE (vexpr) = TREE_TYPE (name);
240 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
241 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
242 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
243 insert_decl_map (id, val, vexpr);
244 return vexpr;
247 processing_debug_stmt = -1;
248 return name;
251 /* Remap anonymous SSA names or SSA names of anonymous decls. */
252 var = SSA_NAME_VAR (name);
253 if (!var
254 || (!SSA_NAME_IS_DEFAULT_DEF (name)
255 && VAR_P (var)
256 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
257 && DECL_ARTIFICIAL (var)
258 && DECL_IGNORED_P (var)
259 && !DECL_NAME (var)))
261 struct ptr_info_def *pi;
262 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
263 if (!var && SSA_NAME_IDENTIFIER (name))
264 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
265 insert_decl_map (id, name, new_tree);
266 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
267 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
268 /* At least IPA points-to info can be directly transferred. */
269 if (id->src_cfun->gimple_df
270 && id->src_cfun->gimple_df->ipa_pta
271 && POINTER_TYPE_P (TREE_TYPE (name))
272 && (pi = SSA_NAME_PTR_INFO (name))
273 && !pi->pt.anything)
275 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
276 new_pi->pt = pi->pt;
278 /* So can range-info. */
279 if (!POINTER_TYPE_P (TREE_TYPE (name))
280 && SSA_NAME_RANGE_INFO (name))
281 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
282 SSA_NAME_RANGE_INFO (name));
283 return new_tree;
286 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
287 in copy_bb. */
288 new_tree = remap_decl (var, id);
290   /* We might have substituted a constant or another SSA_NAME for
291      the variable.
293      Replace the SSA name representing the RESULT_DECL by the variable during
294      inlining: this saves us from needing to introduce a PHI node in the case
295      the return value is only partly initialized.  */
296 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
297 && (!SSA_NAME_VAR (name)
298 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
299 || !id->transform_return_to_modify))
301 struct ptr_info_def *pi;
302 new_tree = make_ssa_name (new_tree);
303 insert_decl_map (id, name, new_tree);
304 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
305 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
306 /* At least IPA points-to info can be directly transferred. */
307 if (id->src_cfun->gimple_df
308 && id->src_cfun->gimple_df->ipa_pta
309 && POINTER_TYPE_P (TREE_TYPE (name))
310 && (pi = SSA_NAME_PTR_INFO (name))
311 && !pi->pt.anything)
313 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
314 new_pi->pt = pi->pt;
316 /* So can range-info. */
317 if (!POINTER_TYPE_P (TREE_TYPE (name))
318 && SSA_NAME_RANGE_INFO (name))
319 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
320 SSA_NAME_RANGE_INFO (name));
321 if (SSA_NAME_IS_DEFAULT_DEF (name))
323 	  /* By inlining a function having an uninitialized variable, we might
324 	     extend its lifetime (the variable might get reused).  This causes an
325 	     ICE in the case we end up extending the lifetime of an SSA name across
326 	     an abnormal edge, and it also increases register pressure.
328 	     We simply initialize all uninitialized vars to 0, except for the
329 	     case we are inlining into the very first BB.  We could avoid
330 	     this for all BBs that are not inside strongly connected
331 	     regions of the CFG, but this is expensive to test.  */
332 if (id->entry_bb
333 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
334 && (!SSA_NAME_VAR (name)
335 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
336 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
337 0)->dest
338 || EDGE_COUNT (id->entry_bb->preds) != 1))
340 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
341 gimple *init_stmt;
342 tree zero = build_zero_cst (TREE_TYPE (new_tree));
344 init_stmt = gimple_build_assign (new_tree, zero);
345 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
346 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
348 else
350 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
351 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
355 else
356 insert_decl_map (id, name, new_tree);
357 return new_tree;
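/* Minimal sketch of the zero-initialization above (hypothetical GIMPLE, not
   from a testcase): if the inlined body has a default definition of an
   uninitialized local, say u_2(D), and that name occurs in an abnormal PHI,
   its copy gets a fresh name initialized at the end of id->entry_bb, roughly

       u_7 = 0;

   so that no uninitialized SSA name has its lifetime extended across an
   abnormal edge.  Otherwise the copy simply becomes the default definition
   of the remapped variable in the destination function.  */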
360 /* Remap DECL during the copying of the BLOCK tree for the function. */
362 tree
363 remap_decl (tree decl, copy_body_data *id)
365 tree *n;
367 /* We only remap local variables in the current function. */
369 /* See if we have remapped this declaration. */
371 n = id->decl_map->get (decl);
373 if (!n && processing_debug_stmt)
375 processing_debug_stmt = -1;
376 return decl;
379 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
380 necessary DECLs have already been remapped and we do not want to duplicate
381 a decl coming from outside of the sequence we are copying. */
382 if (!n
383 && id->prevent_decl_creation_for_types
384 && id->remapping_type_depth > 0
385 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
386 return decl;
388 /* If we didn't already have an equivalent for this declaration, create one
389 now. */
390 if (!n)
392 /* Make a copy of the variable or label. */
393 tree t = id->copy_decl (decl, id);
395 /* Remember it, so that if we encounter this local entity again
396 we can reuse this copy. Do this early because remap_type may
397 need this decl for TYPE_STUB_DECL. */
398 insert_decl_map (id, decl, t);
400 if (!DECL_P (t))
401 return t;
403 /* Remap types, if necessary. */
404 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
405 if (TREE_CODE (t) == TYPE_DECL)
407 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
409 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
410 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
411 is not set on the TYPE_DECL, for example in LTO mode. */
412 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
414 tree x = build_variant_type_copy (TREE_TYPE (t));
415 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
416 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
417 DECL_ORIGINAL_TYPE (t) = x;
421 /* Remap sizes as necessary. */
422 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
423 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
425 /* If fields, do likewise for offset and qualifier. */
426 if (TREE_CODE (t) == FIELD_DECL)
428 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
429 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
430 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
433 return t;
436 if (id->do_not_unshare)
437 return *n;
438 else
439 return unshare_expr (*n);
442 static tree
443 remap_type_1 (tree type, copy_body_data *id)
445 tree new_tree, t;
447   /* We do need a copy.  Build and register it now.  If this is a pointer or
448      reference type, remap the designated type and make a new pointer or
449      reference type.  */
450 if (TREE_CODE (type) == POINTER_TYPE)
452 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
453 TYPE_MODE (type),
454 TYPE_REF_CAN_ALIAS_ALL (type));
455 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
456 new_tree = build_type_attribute_qual_variant (new_tree,
457 TYPE_ATTRIBUTES (type),
458 TYPE_QUALS (type));
459 insert_decl_map (id, type, new_tree);
460 return new_tree;
462 else if (TREE_CODE (type) == REFERENCE_TYPE)
464 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
465 TYPE_MODE (type),
466 TYPE_REF_CAN_ALIAS_ALL (type));
467 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
468 new_tree = build_type_attribute_qual_variant (new_tree,
469 TYPE_ATTRIBUTES (type),
470 TYPE_QUALS (type));
471 insert_decl_map (id, type, new_tree);
472 return new_tree;
474 else
475 new_tree = copy_node (type);
477 insert_decl_map (id, type, new_tree);
479 /* This is a new type, not a copy of an old type. Need to reassociate
480 variants. We can handle everything except the main variant lazily. */
481 t = TYPE_MAIN_VARIANT (type);
482 if (type != t)
484 t = remap_type (t, id);
485 TYPE_MAIN_VARIANT (new_tree) = t;
486 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
487 TYPE_NEXT_VARIANT (t) = new_tree;
489 else
491 TYPE_MAIN_VARIANT (new_tree) = new_tree;
492 TYPE_NEXT_VARIANT (new_tree) = NULL;
495 if (TYPE_STUB_DECL (type))
496 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
498 /* Lazily create pointer and reference types. */
499 TYPE_POINTER_TO (new_tree) = NULL;
500 TYPE_REFERENCE_TO (new_tree) = NULL;
502   /* Copy all types that may contain references to local variables; be sure to
503      preserve sharing between the type and its main variant when possible.  */
504 switch (TREE_CODE (new_tree))
506 case INTEGER_TYPE:
507 case REAL_TYPE:
508 case FIXED_POINT_TYPE:
509 case ENUMERAL_TYPE:
510 case BOOLEAN_TYPE:
511 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
513 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
514 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
516 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
517 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
519 else
521 t = TYPE_MIN_VALUE (new_tree);
522 if (t && TREE_CODE (t) != INTEGER_CST)
523 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
525 t = TYPE_MAX_VALUE (new_tree);
526 if (t && TREE_CODE (t) != INTEGER_CST)
527 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
529 return new_tree;
531 case FUNCTION_TYPE:
532 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
533 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
534 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
535 else
536 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
537 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
538 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
539 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
540 else
541 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
542 return new_tree;
544 case ARRAY_TYPE:
545 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
546 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
547 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
548 else
549 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
551 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
553 gcc_checking_assert (TYPE_DOMAIN (type)
554 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
555 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
557 else
559 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
560 	  /* For array bounds where we have decided not to copy over the bounds
561 	     variable because it isn't used in the OpenMP/OpenACC region, change
562 	     them to an uninitialized VAR_DECL temporary.  */
563 if (id->adjust_array_error_bounds
564 && TYPE_DOMAIN (new_tree)
565 && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
566 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
568 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
569 DECL_ATTRIBUTES (v)
570 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
571 DECL_ATTRIBUTES (v));
572 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
575 break;
577 case RECORD_TYPE:
578 case UNION_TYPE:
579 case QUAL_UNION_TYPE:
580 if (TYPE_MAIN_VARIANT (type) != type
581 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
582 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
583 else
585 tree f, nf = NULL;
587 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
589 t = remap_decl (f, id);
590 DECL_CONTEXT (t) = new_tree;
591 DECL_CHAIN (t) = nf;
592 nf = t;
594 TYPE_FIELDS (new_tree) = nreverse (nf);
596 break;
598 case OFFSET_TYPE:
599 default:
600 /* Shouldn't have been thought variable sized. */
601 gcc_unreachable ();
604   /* All variants of the type share the same size, so use the already remapped data.  */
605 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
607 tree s = TYPE_SIZE (type);
608 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
609 tree su = TYPE_SIZE_UNIT (type);
610 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
611 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
612 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
613 || s == mvs);
614 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
615 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
616 || su == mvsu);
617 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
618 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
620 else
622 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
623 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
626 return new_tree;
629 /* Helper function for remap_type_2, called through walk_tree. */
631 static tree
632 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
634 copy_body_data *id = (copy_body_data *) data;
636 if (TYPE_P (*tp))
637 *walk_subtrees = 0;
639 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
640 return *tp;
642 return NULL_TREE;
645 /* Return true if TYPE needs to be remapped because remap_decl on any
646 needed embedded decl returns something other than that decl. */
648 static bool
649 remap_type_2 (tree type, copy_body_data *id)
651 tree t;
653 #define RETURN_TRUE_IF_VAR(T) \
654 do \
656 tree _t = (T); \
657 if (_t) \
659 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
660 return true; \
661 if (!TYPE_SIZES_GIMPLIFIED (type) \
662 && walk_tree (&_t, remap_type_3, id, NULL)) \
663 return true; \
666 while (0)
668 switch (TREE_CODE (type))
670 case POINTER_TYPE:
671 case REFERENCE_TYPE:
672 case FUNCTION_TYPE:
673 case METHOD_TYPE:
674 return remap_type_2 (TREE_TYPE (type), id);
676 case INTEGER_TYPE:
677 case REAL_TYPE:
678 case FIXED_POINT_TYPE:
679 case ENUMERAL_TYPE:
680 case BOOLEAN_TYPE:
681 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
682 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
683 return false;
685 case ARRAY_TYPE:
686 if (remap_type_2 (TREE_TYPE (type), id)
687 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
688 return true;
689 break;
691 case RECORD_TYPE:
692 case UNION_TYPE:
693 case QUAL_UNION_TYPE:
694 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
695 if (TREE_CODE (t) == FIELD_DECL)
697 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
698 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
699 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
700 if (TREE_CODE (type) == QUAL_UNION_TYPE)
701 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
703 break;
705 default:
706 return false;
709 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
710 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
711 return false;
712 #undef RETURN_TRUE_IF_VAR
715 tree
716 remap_type (tree type, copy_body_data *id)
718 tree *node;
719 tree tmp;
721 if (type == NULL)
722 return type;
724 /* See if we have remapped this type. */
725 node = id->decl_map->get (type);
726 if (node)
727 return *node;
729 /* The type only needs remapping if it's variably modified. */
730 if (! variably_modified_type_p (type, id->src_fn)
731 /* Don't remap if copy_decl method doesn't always return a new
732 decl and for all embedded decls returns the passed in decl. */
733 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
735 insert_decl_map (id, type, type);
736 return type;
739 id->remapping_type_depth++;
740 tmp = remap_type_1 (type, id);
741 id->remapping_type_depth--;
743 return tmp;
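/* Illustrative example of a variably modified type (hypothetical source, not
   from this file): in

       void g (int n)
       {
         int a[n];
       }

   the ARRAY_TYPE of `a' is variably modified: its domain's TYPE_MAX_VALUE
   refers to a local length derived from `n'.  Duplicating the body therefore
   needs remap_type to rebuild that type so its bounds refer to the remapped
   decls, whereas types whose sizes mention no local decls are mapped to
   themselves above and shared unchanged.  */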
746 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
748 static bool
749 can_be_nonlocal (tree decl, copy_body_data *id)
751 /* We cannot duplicate function decls. */
752 if (TREE_CODE (decl) == FUNCTION_DECL)
753 return true;
755 /* Local static vars must be non-local or we get multiple declaration
756 problems. */
757 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
758 return true;
760 return false;
763 static tree
764 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
765 copy_body_data *id)
767 tree old_var;
768 tree new_decls = NULL_TREE;
770 /* Remap its variables. */
771 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
773 tree new_var;
775 if (can_be_nonlocal (old_var, id))
777 /* We need to add this variable to the local decls as otherwise
778 nothing else will do so. */
779 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
780 add_local_decl (cfun, old_var);
781 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
782 && !DECL_IGNORED_P (old_var)
783 && nonlocalized_list)
784 vec_safe_push (*nonlocalized_list, old_var);
785 continue;
788 /* Remap the variable. */
789 new_var = remap_decl (old_var, id);
791 /* If we didn't remap this variable, we can't mess with its
792 TREE_CHAIN. If we remapped this variable to the return slot, it's
793 already declared somewhere else, so don't declare it here. */
795 if (new_var == id->retvar)
797 else if (!new_var)
799 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
800 && !DECL_IGNORED_P (old_var)
801 && nonlocalized_list)
802 vec_safe_push (*nonlocalized_list, old_var);
804 else
806 gcc_assert (DECL_P (new_var));
807 DECL_CHAIN (new_var) = new_decls;
808 new_decls = new_var;
810 /* Also copy value-expressions. */
811 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
813 tree tem = DECL_VALUE_EXPR (new_var);
814 bool old_regimplify = id->regimplify;
815 id->remapping_type_depth++;
816 walk_tree (&tem, copy_tree_body_r, id, NULL);
817 id->remapping_type_depth--;
818 id->regimplify = old_regimplify;
819 SET_DECL_VALUE_EXPR (new_var, tem);
824 return nreverse (new_decls);
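/* Hypothetical example for the nonlocalization above: a `static int counter;'
   declared inside the inlined function cannot be duplicated (there must be a
   single object), so it is kept as-is, added to the caller's local decls,
   and recorded in BLOCK_NONLOCALIZED_VARS for debug info instead of being
   copied like an ordinary automatic variable.  */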
827 /* Copy the BLOCK to contain remapped versions of the variables
828 therein. And hook the new block into the block-tree. */
830 static void
831 remap_block (tree *block, copy_body_data *id)
833 tree old_block;
834 tree new_block;
836 /* Make the new block. */
837 old_block = *block;
838 new_block = make_node (BLOCK);
839 TREE_USED (new_block) = TREE_USED (old_block);
840 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
841 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
842 BLOCK_NONLOCALIZED_VARS (new_block)
843 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
844 *block = new_block;
846 /* Remap its variables. */
847 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
848 &BLOCK_NONLOCALIZED_VARS (new_block),
849 id);
851 if (id->transform_lang_insert_block)
852 id->transform_lang_insert_block (new_block);
854 /* Remember the remapped block. */
855 insert_decl_map (id, old_block, new_block);
858 /* Copy the whole block tree and root it in id->block. */
860 static tree
861 remap_blocks (tree block, copy_body_data *id)
863 tree t;
864 tree new_tree = block;
866 if (!block)
867 return NULL;
869 remap_block (&new_tree, id);
870 gcc_assert (new_tree != block);
871 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
872 prepend_lexical_block (new_tree, remap_blocks (t, id));
873 /* Blocks are in arbitrary order, but make things slightly prettier and do
874 not swap order when producing a copy. */
875 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
876 return new_tree;
879 /* Remap the block tree rooted at BLOCK to nothing. */
881 static void
882 remap_blocks_to_null (tree block, copy_body_data *id)
884 tree t;
885 insert_decl_map (id, block, NULL_TREE);
886 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
887 remap_blocks_to_null (t, id);
890 /* Remap the location info pointed to by LOCUS. */
892 static location_t
893 remap_location (location_t locus, copy_body_data *id)
895 if (LOCATION_BLOCK (locus))
897 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
898 gcc_assert (n);
899 if (*n)
900 return set_block (locus, *n);
903 locus = LOCATION_LOCUS (locus);
905 if (locus != UNKNOWN_LOCATION && id->block)
906 return set_block (locus, id->block);
908 return locus;
911 static void
912 copy_statement_list (tree *tp)
914 tree_stmt_iterator oi, ni;
915 tree new_tree;
917 new_tree = alloc_stmt_list ();
918 ni = tsi_start (new_tree);
919 oi = tsi_start (*tp);
920 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
921 *tp = new_tree;
923 for (; !tsi_end_p (oi); tsi_next (&oi))
925 tree stmt = tsi_stmt (oi);
926 if (TREE_CODE (stmt) == STATEMENT_LIST)
927 /* This copy is not redundant; tsi_link_after will smash this
928 STATEMENT_LIST into the end of the one we're building, and we
929 don't want to do that with the original. */
930 copy_statement_list (&stmt);
931 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
935 static void
936 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
938 tree block = BIND_EXPR_BLOCK (*tp);
939 /* Copy (and replace) the statement. */
940 copy_tree_r (tp, walk_subtrees, NULL);
941 if (block)
943 remap_block (&block, id);
944 BIND_EXPR_BLOCK (*tp) = block;
947 if (BIND_EXPR_VARS (*tp))
948 /* This will remap a lot of the same decls again, but this should be
949 harmless. */
950 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
954 /* Create a new gimple_seq by remapping all the statements in BODY
955 using the inlining information in ID. */
957 static gimple_seq
958 remap_gimple_seq (gimple_seq body, copy_body_data *id)
960 gimple_stmt_iterator si;
961 gimple_seq new_body = NULL;
963 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
965 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
966 gimple_seq_add_seq (&new_body, new_stmts);
969 return new_body;
973 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
974 block using the mapping information in ID. */
976 static gimple *
977 copy_gimple_bind (gbind *stmt, copy_body_data *id)
979 gimple *new_bind;
980 tree new_block, new_vars;
981 gimple_seq body, new_body;
983 /* Copy the statement. Note that we purposely don't use copy_stmt
984 here because we need to remap statements as we copy. */
985 body = gimple_bind_body (stmt);
986 new_body = remap_gimple_seq (body, id);
988 new_block = gimple_bind_block (stmt);
989 if (new_block)
990 remap_block (&new_block, id);
992 /* This will remap a lot of the same decls again, but this should be
993 harmless. */
994 new_vars = gimple_bind_vars (stmt);
995 if (new_vars)
996 new_vars = remap_decls (new_vars, NULL, id);
998 new_bind = gimple_build_bind (new_vars, new_body, new_block);
1000 return new_bind;
1003 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
1005 static bool
1006 is_parm (tree decl)
1008 if (TREE_CODE (decl) == SSA_NAME)
1010 decl = SSA_NAME_VAR (decl);
1011 if (!decl)
1012 return false;
1015 return (TREE_CODE (decl) == PARM_DECL);
1018 /* Remap the dependence CLIQUE from the source to the destination function
1019 as specified in ID. */
1021 static unsigned short
1022 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1024 if (clique == 0 || processing_debug_stmt)
1025 return 0;
1026 if (!id->dependence_map)
1027 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1028 bool existed;
1029 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1030 if (!existed)
1032 /* Clique 1 is reserved for local ones set by PTA. */
1033 if (cfun->last_clique == 0)
1034 cfun->last_clique = 1;
1035 newc = ++cfun->last_clique;
1037 return newc;
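/* Sketch of the clique remapping (hypothetical numbers): if two MEM_REFs in
   the source function both carry MR_DEPENDENCE_CLIQUE == 2, their copies in
   the destination function both receive the same fresh clique, allocated as
   cfun->last_clique + 1 at the first lookup, so the restrict-style
   disambiguation based on clique/base pairs keeps working after the copy.
   Clique 0 (no information) and references copied while processing debug
   statements stay at 0.  */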
1040 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1041 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1042 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
1043 recursing into the children nodes of *TP. */
1045 static tree
1046 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1048 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1049 copy_body_data *id = (copy_body_data *) wi_p->info;
1050 tree fn = id->src_fn;
1052 /* For recursive invocations this is no longer the LHS itself. */
1053 bool is_lhs = wi_p->is_lhs;
1054 wi_p->is_lhs = false;
1056 if (TREE_CODE (*tp) == SSA_NAME)
1058 *tp = remap_ssa_name (*tp, id);
1059 *walk_subtrees = 0;
1060 if (is_lhs)
1061 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1062 return NULL;
1064 else if (auto_var_in_fn_p (*tp, fn))
1066 /* Local variables and labels need to be replaced by equivalent
1067 variables. We don't want to copy static variables; there's
1068 only one of those, no matter how many times we inline the
1069 containing function. Similarly for globals from an outer
1070 function. */
1071 tree new_decl;
1073 /* Remap the declaration. */
1074 new_decl = remap_decl (*tp, id);
1075 gcc_assert (new_decl);
1076 /* Replace this variable with the copy. */
1077 STRIP_TYPE_NOPS (new_decl);
1078 /* ??? The C++ frontend uses void * pointer zero to initialize
1079 any other type. This confuses the middle-end type verification.
1080 As cloned bodies do not go through gimplification again the fixup
1081 there doesn't trigger. */
1082 if (TREE_CODE (new_decl) == INTEGER_CST
1083 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1084 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1085 *tp = new_decl;
1086 *walk_subtrees = 0;
1088 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1089 gcc_unreachable ();
1090 else if (TREE_CODE (*tp) == SAVE_EXPR)
1091 gcc_unreachable ();
1092 else if (TREE_CODE (*tp) == LABEL_DECL
1093 && (!DECL_CONTEXT (*tp)
1094 || decl_function_context (*tp) == id->src_fn))
1095 /* These may need to be remapped for EH handling. */
1096 *tp = remap_decl (*tp, id);
1097 else if (TREE_CODE (*tp) == FIELD_DECL)
1099 /* If the enclosing record type is variably_modified_type_p, the field
1100 has already been remapped. Otherwise, it need not be. */
1101 tree *n = id->decl_map->get (*tp);
1102 if (n)
1103 *tp = *n;
1104 *walk_subtrees = 0;
1106 else if (TYPE_P (*tp))
1107 /* Types may need remapping as well. */
1108 *tp = remap_type (*tp, id);
1109 else if (CONSTANT_CLASS_P (*tp))
1111 /* If this is a constant, we have to copy the node iff the type
1112 will be remapped. copy_tree_r will not copy a constant. */
1113 tree new_type = remap_type (TREE_TYPE (*tp), id);
1115 if (new_type == TREE_TYPE (*tp))
1116 *walk_subtrees = 0;
1118 else if (TREE_CODE (*tp) == INTEGER_CST)
1119 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1120 else
1122 *tp = copy_node (*tp);
1123 TREE_TYPE (*tp) = new_type;
1126 else
1128 /* Otherwise, just copy the node. Note that copy_tree_r already
1129 knows not to copy VAR_DECLs, etc., so this is safe. */
1131 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1133 /* We need to re-canonicalize MEM_REFs from inline substitutions
1134 that can happen when a pointer argument is an ADDR_EXPR.
1135 Recurse here manually to allow that. */
1136 tree ptr = TREE_OPERAND (*tp, 0);
1137 tree type = remap_type (TREE_TYPE (*tp), id);
1138 tree old = *tp;
1139 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1140 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1141 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1142 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1143 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1144 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1146 MR_DEPENDENCE_CLIQUE (*tp)
1147 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1148 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1150 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1151 remapped a parameter as the property might be valid only
1152 for the parameter itself. */
1153 if (TREE_THIS_NOTRAP (old)
1154 && (!is_parm (TREE_OPERAND (old, 0))
1155 || (!id->transform_parameter && is_parm (ptr))))
1156 TREE_THIS_NOTRAP (*tp) = 1;
1157 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1158 *walk_subtrees = 0;
1159 return NULL;
1162 /* Here is the "usual case". Copy this tree node, and then
1163 tweak some special cases. */
1164 copy_tree_r (tp, walk_subtrees, NULL);
1166 if (TREE_CODE (*tp) != OMP_CLAUSE)
1167 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1169 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1171 /* The copied TARGET_EXPR has never been expanded, even if the
1172 original node was expanded already. */
1173 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1174 TREE_OPERAND (*tp, 3) = NULL_TREE;
1176 else if (TREE_CODE (*tp) == ADDR_EXPR)
1178 /* Variable substitution need not be simple. In particular,
1179 the MEM_REF substitution above. Make sure that
1180 TREE_CONSTANT and friends are up-to-date. */
1181 int invariant = is_gimple_min_invariant (*tp);
1182 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1183 recompute_tree_invariant_for_addr_expr (*tp);
1185 /* If this used to be invariant, but is not any longer,
1186 then regimplification is probably needed. */
1187 if (invariant && !is_gimple_min_invariant (*tp))
1188 id->regimplify = true;
1190 *walk_subtrees = 0;
1194 /* Update the TREE_BLOCK for the cloned expr. */
1195 if (EXPR_P (*tp))
1197 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1198 tree old_block = TREE_BLOCK (*tp);
1199 if (old_block)
1201 tree *n;
1202 n = id->decl_map->get (TREE_BLOCK (*tp));
1203 if (n)
1204 new_block = *n;
1206 TREE_SET_BLOCK (*tp, new_block);
1209 /* Keep iterating. */
1210 return NULL_TREE;
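/* Hypothetical illustration of the MEM_REF re-canonicalization above: if the
   caller passes `&s.f' for a pointer parameter `p', a dereference such as
   MEM[(int *)p] in the inlined body is rebuilt with the remapped pointer and
   refolded, which can turn it into a direct reference to `s.f' instead of
   leaving a *& pair around.  Volatility, side effects, the dependence
   clique/base pair and the reverse-storage-order flag are copied from the
   old reference, while TREE_THIS_NOTRAP is only preserved when it does not
   hinge on a remapped parameter.  */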
1214 /* Called from copy_body_id via walk_tree. DATA is really a
1215 `copy_body_data *'. */
1217 tree
1218 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1220 copy_body_data *id = (copy_body_data *) data;
1221 tree fn = id->src_fn;
1222 tree new_block;
1224 /* Begin by recognizing trees that we'll completely rewrite for the
1225 inlining context. Our output for these trees is completely
1226    different from our input (e.g. RETURN_EXPR is deleted, and morphs
1227 into an edge). Further down, we'll handle trees that get
1228 duplicated and/or tweaked. */
1230 /* When requested, RETURN_EXPRs should be transformed to just the
1231 contained MODIFY_EXPR. The branch semantics of the return will
1232 be handled elsewhere by manipulating the CFG rather than a statement. */
1233 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1235 tree assignment = TREE_OPERAND (*tp, 0);
1237 /* If we're returning something, just turn that into an
1238 assignment into the equivalent of the original RESULT_DECL.
1239 If the "assignment" is just the result decl, the result
1240 decl has already been set (e.g. a recent "foo (&result_decl,
1241 ...)"); just toss the entire RETURN_EXPR. */
1242 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1244 /* Replace the RETURN_EXPR with (a copy of) the
1245 MODIFY_EXPR hanging underneath. */
1246 *tp = copy_node (assignment);
1248 else /* Else the RETURN_EXPR returns no value. */
1250 *tp = NULL;
1251 return (tree) (void *)1;
1254 else if (TREE_CODE (*tp) == SSA_NAME)
1256 *tp = remap_ssa_name (*tp, id);
1257 *walk_subtrees = 0;
1258 return NULL;
1261 /* Local variables and labels need to be replaced by equivalent
1262 variables. We don't want to copy static variables; there's only
1263 one of those, no matter how many times we inline the containing
1264 function. Similarly for globals from an outer function. */
1265 else if (auto_var_in_fn_p (*tp, fn))
1267 tree new_decl;
1269 /* Remap the declaration. */
1270 new_decl = remap_decl (*tp, id);
1271 gcc_assert (new_decl);
1272 /* Replace this variable with the copy. */
1273 STRIP_TYPE_NOPS (new_decl);
1274 *tp = new_decl;
1275 *walk_subtrees = 0;
1277 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1278 copy_statement_list (tp);
1279 else if (TREE_CODE (*tp) == SAVE_EXPR
1280 || TREE_CODE (*tp) == TARGET_EXPR)
1281 remap_save_expr (tp, id->decl_map, walk_subtrees);
1282 else if (TREE_CODE (*tp) == LABEL_DECL
1283 && (! DECL_CONTEXT (*tp)
1284 || decl_function_context (*tp) == id->src_fn))
1285 /* These may need to be remapped for EH handling. */
1286 *tp = remap_decl (*tp, id);
1287 else if (TREE_CODE (*tp) == BIND_EXPR)
1288 copy_bind_expr (tp, walk_subtrees, id);
1289 /* Types may need remapping as well. */
1290 else if (TYPE_P (*tp))
1291 *tp = remap_type (*tp, id);
1293 /* If this is a constant, we have to copy the node iff the type will be
1294 remapped. copy_tree_r will not copy a constant. */
1295 else if (CONSTANT_CLASS_P (*tp))
1297 tree new_type = remap_type (TREE_TYPE (*tp), id);
1299 if (new_type == TREE_TYPE (*tp))
1300 *walk_subtrees = 0;
1302 else if (TREE_CODE (*tp) == INTEGER_CST)
1303 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1304 else
1306 *tp = copy_node (*tp);
1307 TREE_TYPE (*tp) = new_type;
1311 /* Otherwise, just copy the node. Note that copy_tree_r already
1312 knows not to copy VAR_DECLs, etc., so this is safe. */
1313 else
1315 /* Here we handle trees that are not completely rewritten.
1316 First we detect some inlining-induced bogosities for
1317 discarding. */
1318 if (TREE_CODE (*tp) == MODIFY_EXPR
1319 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1320 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1322 /* Some assignments VAR = VAR; don't generate any rtl code
1323 and thus don't count as variable modification. Avoid
1324 keeping bogosities like 0 = 0. */
1325 tree decl = TREE_OPERAND (*tp, 0), value;
1326 tree *n;
1328 n = id->decl_map->get (decl);
1329 if (n)
1331 value = *n;
1332 STRIP_TYPE_NOPS (value);
1333 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1335 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1336 return copy_tree_body_r (tp, walk_subtrees, data);
1340 else if (TREE_CODE (*tp) == INDIRECT_REF)
1342 /* Get rid of *& from inline substitutions that can happen when a
1343 pointer argument is an ADDR_EXPR. */
1344 tree decl = TREE_OPERAND (*tp, 0);
1345 tree *n = id->decl_map->get (decl);
1346 if (n)
1348 /* If we happen to get an ADDR_EXPR in n->value, strip
1349 it manually here as we'll eventually get ADDR_EXPRs
1350 which lie about their types pointed to. In this case
1351 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1352 but we absolutely rely on that. As fold_indirect_ref
1353 does other useful transformations, try that first, though. */
1354 tree type = TREE_TYPE (*tp);
1355 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1356 tree old = *tp;
1357 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1358 if (! *tp)
1360 type = remap_type (type, id);
1361 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1364 		    *tp = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1365 /* ??? We should either assert here or build
1366 a VIEW_CONVERT_EXPR instead of blindly leaking
1367 incompatible types to our IL. */
1368 if (! *tp)
1369 *tp = TREE_OPERAND (ptr, 0);
1371 else
1373 *tp = build1 (INDIRECT_REF, type, ptr);
1374 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1375 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1376 TREE_READONLY (*tp) = TREE_READONLY (old);
1377 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1378 have remapped a parameter as the property might be
1379 valid only for the parameter itself. */
1380 if (TREE_THIS_NOTRAP (old)
1381 && (!is_parm (TREE_OPERAND (old, 0))
1382 || (!id->transform_parameter && is_parm (ptr))))
1383 TREE_THIS_NOTRAP (*tp) = 1;
1386 *walk_subtrees = 0;
1387 return NULL;
1390 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1392 /* We need to re-canonicalize MEM_REFs from inline substitutions
1393 that can happen when a pointer argument is an ADDR_EXPR.
1394 Recurse here manually to allow that. */
1395 tree ptr = TREE_OPERAND (*tp, 0);
1396 tree type = remap_type (TREE_TYPE (*tp), id);
1397 tree old = *tp;
1398 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1399 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1400 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1401 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1402 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1403 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1405 MR_DEPENDENCE_CLIQUE (*tp)
1406 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1407 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1409 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1410 remapped a parameter as the property might be valid only
1411 for the parameter itself. */
1412 if (TREE_THIS_NOTRAP (old)
1413 && (!is_parm (TREE_OPERAND (old, 0))
1414 || (!id->transform_parameter && is_parm (ptr))))
1415 TREE_THIS_NOTRAP (*tp) = 1;
1416 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1417 *walk_subtrees = 0;
1418 return NULL;
1421 /* Here is the "usual case". Copy this tree node, and then
1422 tweak some special cases. */
1423 copy_tree_r (tp, walk_subtrees, NULL);
1425   /* If EXPR has a block defined, map it to the newly constructed block.
1426      When inlining we want EXPRs without a block to appear in the block
1427      of the function call if we are not remapping a type.  */
1428 if (EXPR_P (*tp))
1430 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1431 if (TREE_BLOCK (*tp))
1433 tree *n;
1434 n = id->decl_map->get (TREE_BLOCK (*tp));
1435 if (n)
1436 new_block = *n;
1438 TREE_SET_BLOCK (*tp, new_block);
1441 if (TREE_CODE (*tp) != OMP_CLAUSE)
1442 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1444 /* The copied TARGET_EXPR has never been expanded, even if the
1445 original node was expanded already. */
1446 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1448 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1449 TREE_OPERAND (*tp, 3) = NULL_TREE;
1452 /* Variable substitution need not be simple. In particular, the
1453 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1454 and friends are up-to-date. */
1455 else if (TREE_CODE (*tp) == ADDR_EXPR)
1457 int invariant = is_gimple_min_invariant (*tp);
1458 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1460 /* Handle the case where we substituted an INDIRECT_REF
1461 into the operand of the ADDR_EXPR. */
1462 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1463 && !id->do_not_fold)
1465 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1466 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1467 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1468 *tp = t;
1470 else
1471 recompute_tree_invariant_for_addr_expr (*tp);
1473 /* If this used to be invariant, but is not any longer,
1474 then regimplification is probably needed. */
1475 if (invariant && !is_gimple_min_invariant (*tp))
1476 id->regimplify = true;
1478 *walk_subtrees = 0;
1482 /* Keep iterating. */
1483 return NULL_TREE;
1486 /* Helper for remap_gimple_stmt. Given an EH region number for the
1487 source function, map that to the duplicate EH region number in
1488 the destination function. */
1490 static int
1491 remap_eh_region_nr (int old_nr, copy_body_data *id)
1493 eh_region old_r, new_r;
1495 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1496 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1498 return new_r->index;
1501 /* Similar, but operate on INTEGER_CSTs. */
1503 static tree
1504 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1506 int old_nr, new_nr;
1508 old_nr = tree_to_shwi (old_t_nr);
1509 new_nr = remap_eh_region_nr (old_nr, id);
1511 return build_int_cst (integer_type_node, new_nr);
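/* Sketch with hypothetical region numbers: a copied call such as
   __builtin_eh_pointer (3) has its argument rewritten to the duplicated
   region, say __builtin_eh_pointer (7), by looking region 3 up in
   id->eh_map and taking the index of the mapped region.  The same mapping
   is applied below to GIMPLE_RESX and GIMPLE_EH_DISPATCH region numbers.  */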
1514 /* Helper for copy_bb. Remap statement STMT using the inlining
1515 information in ID. Return the new statement copy. */
1517 static gimple_seq
1518 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1520 gimple *copy = NULL;
1521 struct walk_stmt_info wi;
1522 bool skip_first = false;
1523 gimple_seq stmts = NULL;
1525 if (is_gimple_debug (stmt)
1526 && (gimple_debug_nonbind_marker_p (stmt)
1527 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1528 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1529 return NULL;
1531 /* Begin by recognizing trees that we'll completely rewrite for the
1532 inlining context. Our output for these trees is completely
1533 different from our input (e.g. RETURN_EXPR is deleted and morphs
1534 into an edge). Further down, we'll handle trees that get
1535 duplicated and/or tweaked. */
1537 /* When requested, GIMPLE_RETURN should be transformed to just the
1538 contained GIMPLE_ASSIGN. The branch semantics of the return will
1539 be handled elsewhere by manipulating the CFG rather than the
1540 statement. */
1541 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1543 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1545 /* If we're returning something, just turn that into an
1546 assignment to the equivalent of the original RESULT_DECL.
1547 If RETVAL is just the result decl, the result decl has
1548 already been set (e.g. a recent "foo (&result_decl, ...)");
1549 just toss the entire GIMPLE_RETURN. Likewise for when the
1550 call doesn't want the return value. */
1551 if (retval
1552 && (TREE_CODE (retval) != RESULT_DECL
1553 && (!id->call_stmt
1554 || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1555 && (TREE_CODE (retval) != SSA_NAME
1556 || ! SSA_NAME_VAR (retval)
1557 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1559 copy = gimple_build_assign (id->do_not_unshare
1560 ? id->retvar : unshare_expr (id->retvar),
1561 retval);
1562 /* id->retvar is already substituted. Skip it on later remapping. */
1563 skip_first = true;
1565 else
1566 return NULL;
1568 else if (gimple_has_substatements (stmt))
1570 gimple_seq s1, s2;
1572 /* When cloning bodies from the C++ front end, we will be handed bodies
1573 in High GIMPLE form. Handle here all the High GIMPLE statements that
1574 have embedded statements. */
1575 switch (gimple_code (stmt))
1577 case GIMPLE_BIND:
1578 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1579 break;
1581 case GIMPLE_CATCH:
1583 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1584 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1585 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1587 break;
1589 case GIMPLE_EH_FILTER:
1590 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1591 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1592 break;
1594 case GIMPLE_TRY:
1595 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1596 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1597 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1598 break;
1600 case GIMPLE_WITH_CLEANUP_EXPR:
1601 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1602 copy = gimple_build_wce (s1);
1603 break;
1605 case GIMPLE_OMP_PARALLEL:
1607 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1608 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1609 copy = gimple_build_omp_parallel
1610 (s1,
1611 gimple_omp_parallel_clauses (omp_par_stmt),
1612 gimple_omp_parallel_child_fn (omp_par_stmt),
1613 gimple_omp_parallel_data_arg (omp_par_stmt));
1615 break;
1617 case GIMPLE_OMP_TASK:
1618 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1619 copy = gimple_build_omp_task
1620 (s1,
1621 gimple_omp_task_clauses (stmt),
1622 gimple_omp_task_child_fn (stmt),
1623 gimple_omp_task_data_arg (stmt),
1624 gimple_omp_task_copy_fn (stmt),
1625 gimple_omp_task_arg_size (stmt),
1626 gimple_omp_task_arg_align (stmt));
1627 break;
1629 case GIMPLE_OMP_FOR:
1630 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1631 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1632 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1633 gimple_omp_for_clauses (stmt),
1634 gimple_omp_for_collapse (stmt), s2);
1636 size_t i;
1637 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1639 gimple_omp_for_set_index (copy, i,
1640 gimple_omp_for_index (stmt, i));
1641 gimple_omp_for_set_initial (copy, i,
1642 gimple_omp_for_initial (stmt, i));
1643 gimple_omp_for_set_final (copy, i,
1644 gimple_omp_for_final (stmt, i));
1645 gimple_omp_for_set_incr (copy, i,
1646 gimple_omp_for_incr (stmt, i));
1647 gimple_omp_for_set_cond (copy, i,
1648 gimple_omp_for_cond (stmt, i));
1651 break;
1653 case GIMPLE_OMP_MASTER:
1654 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1655 copy = gimple_build_omp_master (s1);
1656 break;
1658 case GIMPLE_OMP_TASKGROUP:
1659 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1660 copy = gimple_build_omp_taskgroup
1661 (s1, gimple_omp_taskgroup_clauses (stmt));
1662 break;
1664 case GIMPLE_OMP_ORDERED:
1665 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1666 copy = gimple_build_omp_ordered
1667 (s1,
1668 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1669 break;
1671 case GIMPLE_OMP_SCAN:
1672 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1673 copy = gimple_build_omp_scan
1674 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1675 break;
1677 case GIMPLE_OMP_SECTION:
1678 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1679 copy = gimple_build_omp_section (s1);
1680 break;
1682 case GIMPLE_OMP_SECTIONS:
1683 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1684 copy = gimple_build_omp_sections
1685 (s1, gimple_omp_sections_clauses (stmt));
1686 break;
1688 case GIMPLE_OMP_SINGLE:
1689 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1690 copy = gimple_build_omp_single
1691 (s1, gimple_omp_single_clauses (stmt));
1692 break;
1694 case GIMPLE_OMP_TARGET:
1695 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1696 copy = gimple_build_omp_target
1697 (s1, gimple_omp_target_kind (stmt),
1698 gimple_omp_target_clauses (stmt));
1699 break;
1701 case GIMPLE_OMP_TEAMS:
1702 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1703 copy = gimple_build_omp_teams
1704 (s1, gimple_omp_teams_clauses (stmt));
1705 break;
1707 case GIMPLE_OMP_CRITICAL:
1708 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1709 copy = gimple_build_omp_critical (s1,
1710 gimple_omp_critical_name
1711 (as_a <gomp_critical *> (stmt)),
1712 gimple_omp_critical_clauses
1713 (as_a <gomp_critical *> (stmt)));
1714 break;
1716 case GIMPLE_TRANSACTION:
1718 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1719 gtransaction *new_trans_stmt;
1720 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1721 id);
1722 copy = new_trans_stmt = gimple_build_transaction (s1);
1723 gimple_transaction_set_subcode (new_trans_stmt,
1724 gimple_transaction_subcode (old_trans_stmt));
1725 gimple_transaction_set_label_norm (new_trans_stmt,
1726 gimple_transaction_label_norm (old_trans_stmt));
1727 gimple_transaction_set_label_uninst (new_trans_stmt,
1728 gimple_transaction_label_uninst (old_trans_stmt));
1729 gimple_transaction_set_label_over (new_trans_stmt,
1730 gimple_transaction_label_over (old_trans_stmt));
1732 break;
1734 default:
1735 gcc_unreachable ();
1738 else
1740 if (gimple_assign_copy_p (stmt)
1741 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1742 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1744 /* Here we handle statements that are not completely rewritten.
1745 First we detect some inlining-induced bogosities for
1746 discarding. */
1748 /* Some assignments VAR = VAR; don't generate any rtl code
1749 and thus don't count as variable modification. Avoid
1750 keeping bogosities like 0 = 0. */
1751 tree decl = gimple_assign_lhs (stmt), value;
1752 tree *n;
1754 n = id->decl_map->get (decl);
1755 if (n)
1757 value = *n;
1758 STRIP_TYPE_NOPS (value);
1759 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1760 return NULL;
1764 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1765 in a block that we aren't copying during tree_function_versioning,
1766 just drop the clobber stmt. */
1767 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1769 tree lhs = gimple_assign_lhs (stmt);
1770 if (TREE_CODE (lhs) == MEM_REF
1771 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1773 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1774 if (gimple_bb (def_stmt)
1775 && !bitmap_bit_p (id->blocks_to_copy,
1776 gimple_bb (def_stmt)->index))
1777 return NULL;
1781       /* We do not allow CLOBBERs of handled components.  In case the
1782 	 returned value is stored via such a handled component, remove
1783 	 the clobber so the stmt verifier is happy.  */
1784 if (gimple_clobber_p (stmt)
1785 && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1787 tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1788 if (!DECL_P (remapped)
1789 && TREE_CODE (remapped) != MEM_REF)
1790 return NULL;
1793 if (gimple_debug_bind_p (stmt))
1795 gdebug *copy
1796 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1797 gimple_debug_bind_get_value (stmt),
1798 stmt);
1799 if (id->reset_location)
1800 gimple_set_location (copy, input_location);
1801 id->debug_stmts.safe_push (copy);
1802 gimple_seq_add_stmt (&stmts, copy);
1803 return stmts;
1805 if (gimple_debug_source_bind_p (stmt))
1807 gdebug *copy = gimple_build_debug_source_bind
1808 (gimple_debug_source_bind_get_var (stmt),
1809 gimple_debug_source_bind_get_value (stmt),
1810 stmt);
1811 if (id->reset_location)
1812 gimple_set_location (copy, input_location);
1813 id->debug_stmts.safe_push (copy);
1814 gimple_seq_add_stmt (&stmts, copy);
1815 return stmts;
1817 if (gimple_debug_nonbind_marker_p (stmt))
1819 /* If the inlined function has too many debug markers,
1820 don't copy them. */
1821 if (id->src_cfun->debug_marker_count
1822 > param_max_debug_marker_count)
1823 return stmts;
1825 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1826 if (id->reset_location)
1827 gimple_set_location (copy, input_location);
1828 id->debug_stmts.safe_push (copy);
1829 gimple_seq_add_stmt (&stmts, copy);
1830 return stmts;
1833 /* Create a new deep copy of the statement. */
1834 copy = gimple_copy (stmt);
1836 /* Clear flags that need revisiting. */
1837 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1839 if (gimple_call_tail_p (call_stmt))
1840 gimple_call_set_tail (call_stmt, false);
1841 if (gimple_call_from_thunk_p (call_stmt))
1842 gimple_call_set_from_thunk (call_stmt, false);
1843 if (gimple_call_internal_p (call_stmt))
1844 switch (gimple_call_internal_fn (call_stmt))
1846 case IFN_GOMP_SIMD_LANE:
1847 case IFN_GOMP_SIMD_VF:
1848 case IFN_GOMP_SIMD_LAST_LANE:
1849 case IFN_GOMP_SIMD_ORDERED_START:
1850 case IFN_GOMP_SIMD_ORDERED_END:
1851 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1852 break;
1853 default:
1854 break;
1858 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1859 RESX and EH_DISPATCH. */
1860 if (id->eh_map)
1861 switch (gimple_code (copy))
1863 case GIMPLE_CALL:
1865 tree r, fndecl = gimple_call_fndecl (copy);
1866 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1867 switch (DECL_FUNCTION_CODE (fndecl))
1869 case BUILT_IN_EH_COPY_VALUES:
1870 r = gimple_call_arg (copy, 1);
1871 r = remap_eh_region_tree_nr (r, id);
1872 gimple_call_set_arg (copy, 1, r);
1873 /* FALLTHRU */
1875 case BUILT_IN_EH_POINTER:
1876 case BUILT_IN_EH_FILTER:
1877 r = gimple_call_arg (copy, 0);
1878 r = remap_eh_region_tree_nr (r, id);
1879 gimple_call_set_arg (copy, 0, r);
1880 break;
1882 default:
1883 break;
1886 /* Reset alias info if we didn't apply measures to
1887 keep it valid over inlining by setting DECL_PT_UID. */
1888 if (!id->src_cfun->gimple_df
1889 || !id->src_cfun->gimple_df->ipa_pta)
1890 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1892 break;
1894 case GIMPLE_RESX:
1896 gresx *resx_stmt = as_a <gresx *> (copy);
1897 int r = gimple_resx_region (resx_stmt);
1898 r = remap_eh_region_nr (r, id);
1899 gimple_resx_set_region (resx_stmt, r);
1901 break;
1903 case GIMPLE_EH_DISPATCH:
1905 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1906 int r = gimple_eh_dispatch_region (eh_dispatch);
1907 r = remap_eh_region_nr (r, id);
1908 gimple_eh_dispatch_set_region (eh_dispatch, r);
1910 break;
1912 default:
1913 break;
1917 /* If STMT has a block defined, map it to the newly constructed block. */
1918 if (tree block = gimple_block (copy))
1920 tree *n;
1921 n = id->decl_map->get (block);
1922 gcc_assert (n);
1923 gimple_set_block (copy, *n);
1925 if (id->param_body_adjs)
1927 gimple_seq extra_stmts = NULL;
1928 id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts);
1929 if (!gimple_seq_empty_p (extra_stmts))
1931 memset (&wi, 0, sizeof (wi));
1932 wi.info = id;
1933 for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1934 !gsi_end_p (egsi);
1935 gsi_next (&egsi))
1936 walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1937 gimple_seq_add_seq (&stmts, extra_stmts);
1941 if (id->reset_location)
1942 gimple_set_location (copy, input_location);
1944 /* Debug statements ought to be rebuilt and not copied. */
1945 gcc_checking_assert (!is_gimple_debug (copy));
1947 /* Remap all the operands in COPY. */
1948 memset (&wi, 0, sizeof (wi));
1949 wi.info = id;
1950 if (skip_first)
1951 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1952 else
1953 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1955 /* Clear the copied virtual operands. We are not remapping them here
1956 but are going to recreate them from scratch. */
1957 if (gimple_has_mem_ops (copy))
1959 gimple_set_vdef (copy, NULL_TREE);
1960 gimple_set_vuse (copy, NULL_TREE);
1963 if (cfun->can_throw_non_call_exceptions)
1965 /* When inlining a function which does not have non-call exceptions
1966 enabled into a function that has them (which only happens with
1967 always-inline) we have to fix up stmts that cannot throw. */
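/* A rough sketch of the rewrite done below (names and GIMPLE spelling are
   illustrative only): a condition whose comparison may trap, e.g.

     if (x_1 > y_2) goto <L1>; else goto <L2>;

   with a potentially trapping FP comparison, is split into a separate
   (throwing) comparison plus a condition that cannot throw:

     _tmp = x_1 > y_2;
     if (_tmp != 0) goto <L1>; else goto <L2>;  */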
1968 if (gcond *cond = dyn_cast <gcond *> (copy))
1969 if (gimple_could_trap_p (cond))
1971 gassign *cmp
1972 = gimple_build_assign (make_ssa_name (boolean_type_node),
1973 gimple_cond_code (cond),
1974 gimple_cond_lhs (cond),
1975 gimple_cond_rhs (cond));
1976 gimple_seq_add_stmt (&stmts, cmp);
1977 gimple_cond_set_code (cond, NE_EXPR);
1978 gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
1979 gimple_cond_set_rhs (cond, boolean_false_node);
1983 gimple_seq_add_stmt (&stmts, copy);
1984 return stmts;
1988 /* Copy a basic block and scale its profile accordingly. Edges will be taken
1989 care of later. */
1991 static basic_block
1992 copy_bb (copy_body_data *id, basic_block bb,
1993 profile_count num, profile_count den)
1995 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1996 basic_block copy_basic_block;
1997 tree decl;
1998 basic_block prev;
2000 profile_count::adjust_for_ipa_scaling (&num, &den);
2002 /* Search for previous copied basic block. */
2003 prev = bb->prev_bb;
2004 while (!prev->aux)
2005 prev = prev->prev_bb;
2007 /* create_basic_block() will append every new block to
2008 basic_block_info automatically. */
2009 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
2010 copy_basic_block->count = bb->count.apply_scale (num, den);
2012 copy_gsi = gsi_start_bb (copy_basic_block);
2014 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2016 gimple_seq stmts;
2017 gimple *stmt = gsi_stmt (gsi);
2018 gimple *orig_stmt = stmt;
2019 gimple_stmt_iterator stmts_gsi;
2020 bool stmt_added = false;
2022 id->regimplify = false;
2023 stmts = remap_gimple_stmt (stmt, id);
2025 if (gimple_seq_empty_p (stmts))
2026 continue;
2028 seq_gsi = copy_gsi;
2030 for (stmts_gsi = gsi_start (stmts);
2031 !gsi_end_p (stmts_gsi); )
2033 stmt = gsi_stmt (stmts_gsi);
2035 /* Advance iterator now before stmt is moved to seq_gsi. */
2036 gsi_next (&stmts_gsi);
2038 if (gimple_nop_p (stmt))
2039 continue;
2041 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2042 orig_stmt);
2044 /* With return slot optimization we can end up with
2045 non-gimple (foo *)&this->m, fix that here. */
2046 if (is_gimple_assign (stmt)
2047 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
2048 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2050 tree new_rhs;
2051 new_rhs = force_gimple_operand_gsi (&seq_gsi,
2052 gimple_assign_rhs1 (stmt),
2053 true, NULL, false,
2054 GSI_CONTINUE_LINKING);
2055 gimple_assign_set_rhs1 (stmt, new_rhs);
2056 id->regimplify = false;
2059 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2061 if (id->regimplify)
2062 gimple_regimplify_operands (stmt, &seq_gsi);
2064 stmt_added = true;
2067 if (!stmt_added)
2068 continue;
2070 /* If copy_basic_block has been empty at the start of this iteration,
2071 call gsi_start_bb again to get at the newly added statements. */
2072 if (gsi_end_p (copy_gsi))
2073 copy_gsi = gsi_start_bb (copy_basic_block);
2074 else
2075 gsi_next (&copy_gsi);
2077 /* Process the new statement. The call to gimple_regimplify_operands
2078 possibly turned the statement into multiple statements, so we
2079 need to process all of them. */
2080 do
2082 tree fn;
2083 gcall *call_stmt;
2085 stmt = gsi_stmt (copy_gsi);
2086 call_stmt = dyn_cast <gcall *> (stmt);
2087 if (call_stmt
2088 && gimple_call_va_arg_pack_p (call_stmt)
2089 && id->call_stmt
2090 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2092 /* __builtin_va_arg_pack () should be replaced by
2093 all arguments corresponding to ... in the caller. */
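/* Illustrative sketch (hypothetical callee and caller): if the function
   being inlined is "int f (int x, ...)" and the call being inlined is
   "f (1, 2, 3)", then a call "g (a_5, __builtin_va_arg_pack ())" in its
   body is rebuilt as "g (a_5, 2, 3)": the explicit arguments are kept and
   the caller's arguments matching the "..." are appended.  */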
2094 tree p;
2095 gcall *new_call;
2096 vec<tree> argarray;
2097 size_t nargs = gimple_call_num_args (id->call_stmt);
2098 size_t n;
2100 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2101 nargs--;
2103 /* Create the new array of arguments. */
2104 n = nargs + gimple_call_num_args (call_stmt);
2105 argarray.create (n);
2106 argarray.safe_grow_cleared (n, true);
2108 /* Copy all the arguments before '...' */
2109 memcpy (argarray.address (),
2110 gimple_call_arg_ptr (call_stmt, 0),
2111 gimple_call_num_args (call_stmt) * sizeof (tree));
2113 /* Append the arguments passed in '...' */
2114 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2115 gimple_call_arg_ptr (id->call_stmt, 0)
2116 + (gimple_call_num_args (id->call_stmt) - nargs),
2117 nargs * sizeof (tree));
2119 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2120 argarray);
2122 argarray.release ();
2124 /* Copy all GIMPLE_CALL flags, location and block, except
2125 GF_CALL_VA_ARG_PACK. */
2126 gimple_call_copy_flags (new_call, call_stmt);
2127 gimple_call_set_va_arg_pack (new_call, false);
2128 /* location includes block. */
2129 gimple_set_location (new_call, gimple_location (stmt));
2130 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2132 gsi_replace (&copy_gsi, new_call, false);
2133 stmt = new_call;
2135 else if (call_stmt
2136 && id->call_stmt
2137 && (decl = gimple_call_fndecl (stmt))
2138 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2140 /* __builtin_va_arg_pack_len () should be replaced by
2141 the number of anonymous arguments. */
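/* Illustrative sketch (hypothetical callee "int f (int x, ...)"): when
   inlining the call "f (1, 2, 3)", __builtin_va_arg_pack_len () evaluates
   to 2, the number of arguments matching the "...". If the call being
   inlined itself uses __builtin_va_arg_pack (), the final length is not
   known yet, so the code below keeps the builtin and adds this count to
   its result instead.  */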
2142 size_t nargs = gimple_call_num_args (id->call_stmt);
2143 tree count, p;
2144 gimple *new_stmt;
2146 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2147 nargs--;
2149 if (!gimple_call_lhs (stmt))
2151 /* Drop unused calls. */
2152 gsi_remove (&copy_gsi, false);
2153 continue;
2155 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2157 count = build_int_cst (integer_type_node, nargs);
2158 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2159 gsi_replace (&copy_gsi, new_stmt, false);
2160 stmt = new_stmt;
2162 else if (nargs != 0)
2164 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2165 count = build_int_cst (integer_type_node, nargs);
2166 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2167 PLUS_EXPR, newlhs, count);
2168 gimple_call_set_lhs (stmt, newlhs);
2169 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2172 else if (call_stmt
2173 && id->call_stmt
2174 && gimple_call_internal_p (stmt)
2175 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2177 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2178 gsi_remove (&copy_gsi, false);
2179 continue;
2182 /* Statements produced by inlining can be unfolded, especially
2183 when we have constant propagated some operands. We can't fold
2184 them right now for two reasons:
2185 1) folding requires SSA_NAME_DEF_STMTs to be correct
2186 2) we can't change function calls to builtins.
2187 So we just mark the statement for later folding. We mark
2188 all new statements, instead of just the statements that have changed
2189 by some nontrivial substitution, so even statements made
2190 foldable indirectly are updated. If this turns out to be
2191 expensive, copy_body can be told to watch for nontrivial
2192 changes. */
2193 if (id->statements_to_fold)
2194 id->statements_to_fold->add (stmt);
2196 /* We're duplicating a CALL_EXPR. Find any corresponding
2197 callgraph edges and update or duplicate them. */
2198 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2200 struct cgraph_edge *edge;
2202 switch (id->transform_call_graph_edges)
2204 case CB_CGE_DUPLICATE:
2205 edge = id->src_node->get_edge (orig_stmt);
2206 if (edge)
2208 struct cgraph_edge *old_edge = edge;
2210 /* A speculative call consists of multiple
2211 edges - an indirect edge and one or more direct edges.
2212 Duplicate the whole thing and distribute frequencies
2213 accordingly. */
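/* A numeric sketch with made-up counts: for one direct target counted 80
   and an indirect remainder counted 20, a copied block counted 50 gives
   the cloned direct edge 50 * 80/100 = 40 and the cloned indirect edge
   50 * 20/100 = 10, preserving the original ratio.  */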
2214 if (edge->speculative)
2216 int n = 0;
2217 profile_count direct_cnt
2218 = profile_count::zero ();
2220 /* First figure out the distribution of counts
2221 so we can re-scale BB profile accordingly. */
2222 for (cgraph_edge *e = old_edge; e;
2223 e = e->next_speculative_call_target ())
2224 direct_cnt = direct_cnt + e->count;
2226 cgraph_edge *indirect
2227 = old_edge->speculative_call_indirect_edge ();
2228 profile_count indir_cnt = indirect->count;
2230 /* Next iterate over all direct edges, clone each one and its
2231 corresponding reference, and update the profile. */
2232 for (cgraph_edge *e = old_edge;
2233 e;
2234 e = e->next_speculative_call_target ())
2236 profile_count cnt = e->count;
2238 id->dst_node->clone_reference
2239 (e->speculative_call_target_ref (), stmt);
2240 edge = e->clone (id->dst_node, call_stmt,
2241 gimple_uid (stmt), num, den,
2242 true);
2243 profile_probability prob
2244 = cnt.probability_in (direct_cnt
2245 + indir_cnt);
2246 edge->count
2247 = copy_basic_block->count.apply_probability
2248 (prob);
2249 n++;
2251 gcc_checking_assert
2252 (indirect->num_speculative_call_targets_p ()
2253 == n);
2255 /* Duplicate the indirect edge after all direct edges have been
2256 cloned. */
2257 indirect = indirect->clone (id->dst_node, call_stmt,
2258 gimple_uid (stmt),
2259 num, den,
2260 true);
2262 profile_probability prob
2263 = indir_cnt.probability_in (direct_cnt
2264 + indir_cnt);
2265 indirect->count
2266 = copy_basic_block->count.apply_probability (prob);
2268 else
2270 edge = edge->clone (id->dst_node, call_stmt,
2271 gimple_uid (stmt),
2272 num, den,
2273 true);
2274 edge->count = copy_basic_block->count;
2277 break;
2279 case CB_CGE_MOVE_CLONES:
2280 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2281 call_stmt);
2282 edge = id->dst_node->get_edge (stmt);
2283 break;
2285 case CB_CGE_MOVE:
2286 edge = id->dst_node->get_edge (orig_stmt);
2287 if (edge)
2288 edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2289 break;
2291 default:
2292 gcc_unreachable ();
2295 /* Constant propagation on arguments done during inlining
2296 may create a new direct call. Produce an edge for it. */
2297 if ((!edge
2298 || (edge->indirect_inlining_edge
2299 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2300 && id->dst_node->definition
2301 && (fn = gimple_call_fndecl (stmt)) != NULL)
2303 struct cgraph_node *dest = cgraph_node::get_create (fn);
2305 /* We have a missing edge in the callgraph. This can happen
2306 when a previous inlining turned an indirect call into a
2307 direct call by constant propagating arguments, or when we are
2308 producing a dead clone (for further cloning). In all
2309 other cases we hit a bug (incorrect node sharing is the
2310 most common reason for missing edges). */
2311 gcc_assert (!dest->definition
2312 || dest->address_taken
2313 || !id->src_node->definition
2314 || !id->dst_node->definition);
2315 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2316 id->dst_node->create_edge_including_clones
2317 (dest, orig_stmt, call_stmt, bb->count,
2318 CIF_ORIGINALLY_INDIRECT_CALL);
2319 else
2320 id->dst_node->create_edge (dest, call_stmt,
2321 bb->count)->inline_failed
2322 = CIF_ORIGINALLY_INDIRECT_CALL;
2323 if (dump_file)
2325 fprintf (dump_file, "Created new direct edge to %s\n",
2326 dest->dump_name ());
2330 notice_special_calls (as_a <gcall *> (stmt));
2333 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2334 id->eh_map, id->eh_lp_nr);
2336 gsi_next (&copy_gsi);
2338 while (!gsi_end_p (copy_gsi));
2340 copy_gsi = gsi_last_bb (copy_basic_block);
2343 return copy_basic_block;
2346 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2347 form is quite easy, since the dominator relationship for the old basic blocks
2348 does not change.
2350 There is however an exception where inlining might change the dominator
2351 relation across EH edges from basic blocks within the inlined function
2352 to landing pads in the function we inline into.
2354 The function fills in PHI_RESULTs of such PHI nodes if they refer
2355 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2356 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2357 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2358 set, and this means that there will be no overlapping live ranges
2359 for the underlying symbol.
2361 This might change in the future if we allow redirecting of EH edges and
2362 we might then want to change the way the pre-inlining CFG is built to
2363 include all the possible edges. */
2364 static void
2365 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2366 bool can_throw, bool nonlocal_goto)
2368 edge e;
2369 edge_iterator ei;
2371 FOR_EACH_EDGE (e, ei, bb->succs)
2372 if (!e->dest->aux
2373 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2375 gphi *phi;
2376 gphi_iterator si;
2378 if (!nonlocal_goto)
2379 gcc_assert (e->flags & EDGE_EH);
2381 if (!can_throw)
2382 gcc_assert (!(e->flags & EDGE_EH));
2384 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2386 edge re;
2388 phi = si.phi ();
2390 /* For abnormal goto/call edges the receiver can be the
2391 ENTRY_BLOCK. Do not assert this cannot happen. */
2393 gcc_assert ((e->flags & EDGE_EH)
2394 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2396 re = find_edge (ret_bb, e->dest);
2397 gcc_checking_assert (re);
2398 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2399 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2401 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2402 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2407 /* Insert clobbers for automatic variables of the inlined function
2408 ID->src_fn at the start of basic block ID->eh_landing_pad_dest. */
2410 static void
2411 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2413 tree var;
2414 basic_block bb = id->eh_landing_pad_dest;
2415 live_vars_map *vars = NULL;
2416 unsigned int cnt = 0;
2417 unsigned int i;
2418 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2419 if (VAR_P (var)
2420 && !DECL_HARD_REGISTER (var)
2421 && !TREE_THIS_VOLATILE (var)
2422 && !DECL_HAS_VALUE_EXPR_P (var)
2423 && !is_gimple_reg (var)
2424 && auto_var_in_fn_p (var, id->src_fn)
2425 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2427 tree *t = id->decl_map->get (var);
2428 if (!t)
2429 continue;
2430 tree new_var = *t;
2431 if (VAR_P (new_var)
2432 && !DECL_HARD_REGISTER (new_var)
2433 && !TREE_THIS_VOLATILE (new_var)
2434 && !DECL_HAS_VALUE_EXPR_P (new_var)
2435 && !is_gimple_reg (new_var)
2436 && auto_var_in_fn_p (new_var, id->dst_fn))
2438 if (vars == NULL)
2439 vars = new live_vars_map;
2440 vars->put (DECL_UID (var), cnt++);
2443 if (vars == NULL)
2444 return;
2446 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2447 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2448 if (VAR_P (var))
2450 edge e;
2451 edge_iterator ei;
2452 bool needed = false;
2453 unsigned int *v = vars->get (DECL_UID (var));
2454 if (v == NULL)
2455 continue;
2456 FOR_EACH_EDGE (e, ei, bb->preds)
2457 if ((e->flags & EDGE_EH) != 0
2458 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2460 basic_block src_bb = (basic_block) e->src->aux;
2462 if (bitmap_bit_p (&live[src_bb->index], *v))
2464 needed = true;
2465 break;
2468 if (needed)
2470 tree new_var = *id->decl_map->get (var);
2471 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2472 tree clobber = build_clobber (TREE_TYPE (new_var));
2473 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2474 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2477 destroy_live_vars (live);
2478 delete vars;
2481 /* Copy edges from BB into its copy constructed earlier, scale profile
2482 accordingly. Edges will be taken care of later. Aux
2483 pointers are assumed to point to the copies of each BB. Return true if any
2484 debug stmts are left after a statement that must end the basic block. */
2486 static bool
2487 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2488 basic_block ret_bb, basic_block abnormal_goto_dest,
2489 copy_body_data *id)
2491 basic_block new_bb = (basic_block) bb->aux;
2492 edge_iterator ei;
2493 edge old_edge;
2494 gimple_stmt_iterator si;
2495 bool need_debug_cleanup = false;
2497 /* Use the indices from the original blocks to create edges for the
2498 new ones. */
2499 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2500 if (!(old_edge->flags & EDGE_EH))
2502 edge new_edge;
2503 int flags = old_edge->flags;
2504 location_t locus = old_edge->goto_locus;
2506 /* Return edges do get a FALLTHRU flag when they get inlined. */
2507 if (old_edge->dest->index == EXIT_BLOCK
2508 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2509 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2510 flags |= EDGE_FALLTHRU;
2512 new_edge
2513 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2514 new_edge->probability = old_edge->probability;
2515 if (!id->reset_location)
2516 new_edge->goto_locus = remap_location (locus, id);
2519 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2520 return false;
2522 /* When doing function splitting, we must decrease the count of the return
2523 block, which was previously reachable from blocks we did not copy. */
2524 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2525 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2526 if (old_edge->src->index != ENTRY_BLOCK
2527 && !old_edge->src->aux)
2528 new_bb->count -= old_edge->count ().apply_scale (num, den);
2530 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2532 gimple *copy_stmt;
2533 bool can_throw, nonlocal_goto;
2535 copy_stmt = gsi_stmt (si);
2536 if (!is_gimple_debug (copy_stmt))
2537 update_stmt (copy_stmt);
2539 /* Do this before the possible split_block. */
2540 gsi_next (&si);
2542 /* If this tree could throw an exception, there are two
2543 cases where we need to add abnormal edge(s): the
2544 tree wasn't in a region and there is a "current
2545 region" in the caller; or the original tree had
2546 EH edges. In both cases split the block after the tree,
2547 and add abnormal edge(s) as needed; we need both
2548 those from the callee and the caller.
2549 We check whether the copy can throw, because constant
2550 propagation can change an INDIRECT_REF which throws
2551 into a COMPONENT_REF which doesn't. If the copy
2552 can throw, the original could also throw. */
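/* For example (an illustrative sketch): an inlined "*p_1" load may have
   been able to trap, but after constant propagating "&s.f" into p_1 the
   copy reads "s.f" and can no longer throw, even though the original
   statement could.  */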
2553 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2554 nonlocal_goto
2555 = (stmt_can_make_abnormal_goto (copy_stmt)
2556 && !computed_goto_p (copy_stmt));
2558 if (can_throw || nonlocal_goto)
2560 if (!gsi_end_p (si))
2562 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2563 gsi_next (&si);
2564 if (gsi_end_p (si))
2565 need_debug_cleanup = true;
2567 if (!gsi_end_p (si))
2568 /* Note that bb's predecessor edges aren't necessarily
2569 right at this point; split_block doesn't care. */
2571 edge e = split_block (new_bb, copy_stmt);
2573 new_bb = e->dest;
2574 new_bb->aux = e->src->aux;
2575 si = gsi_start_bb (new_bb);
2579 bool update_probs = false;
2581 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2583 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2584 update_probs = true;
2586 else if (can_throw)
2588 make_eh_edges (copy_stmt);
2589 update_probs = true;
2592 /* EH edges may not match old edges. Copy as much as possible. */
2593 if (update_probs)
2595 edge e;
2596 edge_iterator ei;
2597 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2599 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2600 if ((old_edge->flags & EDGE_EH)
2601 && (e = find_edge (copy_stmt_bb,
2602 (basic_block) old_edge->dest->aux))
2603 && (e->flags & EDGE_EH))
2604 e->probability = old_edge->probability;
2606 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2607 if (e->flags & EDGE_EH)
2609 if (!e->probability.initialized_p ())
2610 e->probability = profile_probability::never ();
2611 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2613 if (id->eh_landing_pad_dest == NULL)
2614 id->eh_landing_pad_dest = e->dest;
2615 else
2616 gcc_assert (id->eh_landing_pad_dest == e->dest);
2622 /* If the call we inline cannot make an abnormal goto, do not add
2623 additional abnormal edges but only retain those already present
2624 in the original function body. */
2625 if (abnormal_goto_dest == NULL)
2626 nonlocal_goto = false;
2627 if (nonlocal_goto)
2629 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2631 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2632 nonlocal_goto = false;
2633 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2634 in OpenMP regions which aren't allowed to be left abnormally.
2635 So, no need to add abnormal edge in that case. */
2636 else if (is_gimple_call (copy_stmt)
2637 && gimple_call_internal_p (copy_stmt)
2638 && (gimple_call_internal_fn (copy_stmt)
2639 == IFN_ABNORMAL_DISPATCHER)
2640 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2641 nonlocal_goto = false;
2642 else
2643 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2644 EDGE_ABNORMAL);
2647 if ((can_throw || nonlocal_goto)
2648 && gimple_in_ssa_p (cfun))
2649 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2650 can_throw, nonlocal_goto);
2652 return need_debug_cleanup;
2655 /* Copy the PHIs. All blocks and edges are copied, some blocks
2656 were possibly split and new outgoing EH edges inserted.
2657 BB points to the block of the original function and AUX pointers link
2658 the original and newly copied blocks. */
2660 static void
2661 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2663 basic_block const new_bb = (basic_block) bb->aux;
2664 edge_iterator ei;
2665 gphi *phi;
2666 gphi_iterator si;
2667 edge new_edge;
2668 bool inserted = false;
2670 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2672 tree res, new_res;
2673 gphi *new_phi;
2675 phi = si.phi ();
2676 res = PHI_RESULT (phi);
2677 new_res = res;
2678 if (!virtual_operand_p (res))
2680 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2681 if (EDGE_COUNT (new_bb->preds) == 0)
2683 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2684 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2686 else
2688 new_phi = create_phi_node (new_res, new_bb);
2689 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2691 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2692 bb);
2693 tree arg;
2694 tree new_arg;
2695 edge_iterator ei2;
2696 location_t locus;
2698 /* When doing partial cloning, we allow PHIs on the entry
2699 block as long as all the arguments are the same.
2700 Find any input edge to see which argument to copy. */
2701 if (!old_edge)
2702 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2703 if (!old_edge->src->aux)
2704 break;
2706 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2707 new_arg = arg;
2708 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2709 gcc_assert (new_arg);
2710 /* With return slot optimization we can end up with
2711 non-gimple (foo *)&this->m, fix that here. */
2712 if (TREE_CODE (new_arg) != SSA_NAME
2713 && TREE_CODE (new_arg) != FUNCTION_DECL
2714 && !is_gimple_val (new_arg))
2716 gimple_seq stmts = NULL;
2717 new_arg = force_gimple_operand (new_arg, &stmts, true,
2718 NULL);
2719 gsi_insert_seq_on_edge (new_edge, stmts);
2720 inserted = true;
2722 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2723 if (id->reset_location)
2724 locus = input_location;
2725 else
2726 locus = remap_location (locus, id);
2727 add_phi_arg (new_phi, new_arg, new_edge, locus);
2733 /* Commit the delayed edge insertions. */
2734 if (inserted)
2735 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2736 gsi_commit_one_edge_insert (new_edge, NULL);
2740 /* Wrapper for remap_decl so it can be used as a callback. */
2742 static tree
2743 remap_decl_1 (tree decl, void *data)
2745 return remap_decl (decl, (copy_body_data *) data);
2748 /* Build the struct function and associated data structures for the new clone
2749 NEW_FNDECL to be built. CALLEE_FNDECL is the original. This function changes
2750 cfun to the function of new_fndecl (and current_function_decl too). */
2752 static void
2753 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2755 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2757 if (!DECL_ARGUMENTS (new_fndecl))
2758 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2759 if (!DECL_RESULT (new_fndecl))
2760 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2762 /* Register specific tree functions. */
2763 gimple_register_cfg_hooks ();
2765 /* Get clean struct function. */
2766 push_struct_function (new_fndecl);
2768 /* We will rebuild these, so just sanity check that they are empty. */
2769 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2770 gcc_assert (cfun->local_decls == NULL);
2771 gcc_assert (cfun->cfg == NULL);
2772 gcc_assert (cfun->decl == new_fndecl);
2774 /* Copy items we preserve during cloning. */
2775 cfun->static_chain_decl = src_cfun->static_chain_decl;
2776 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2777 cfun->function_end_locus = src_cfun->function_end_locus;
2778 cfun->curr_properties = src_cfun->curr_properties;
2779 cfun->last_verified = src_cfun->last_verified;
2780 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2781 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2782 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2783 cfun->calls_eh_return = src_cfun->calls_eh_return;
2784 cfun->stdarg = src_cfun->stdarg;
2785 cfun->after_inlining = src_cfun->after_inlining;
2786 cfun->can_throw_non_call_exceptions
2787 = src_cfun->can_throw_non_call_exceptions;
2788 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2789 cfun->returns_struct = src_cfun->returns_struct;
2790 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2792 init_empty_tree_cfg ();
2794 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2796 profile_count num = count;
2797 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2798 profile_count::adjust_for_ipa_scaling (&num, &den);
2800 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2801 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2802 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2803 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2804 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2805 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2806 if (src_cfun->eh)
2807 init_eh_for_function ();
2809 if (src_cfun->gimple_df)
2811 init_tree_ssa (cfun);
2812 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2813 if (cfun->gimple_df->in_ssa_p)
2814 init_ssa_operands (cfun);
2818 /* Helper function for copy_cfg_body. Move debug stmts from the end
2819 of NEW_BB to the beginning of successor basic blocks when needed. If the
2820 successor has multiple predecessors, reset them; otherwise keep
2821 their value. */
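/* A sketch of the situation handled here (hedged, not from a particular
   test case): if the last real statement of NEW_BB can throw or make an
   abnormal goto and is followed only by debug stmts, those debug stmts are
   stranded after a statement that must end the block. They are therefore
   copied to the start of every successor, moved rather than copied for the
   last edge, and their values are reset when the destination has several
   predecessors.  */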
2823 static void
2824 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2826 edge e;
2827 edge_iterator ei;
2828 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2830 if (gsi_end_p (si)
2831 || gsi_one_before_end_p (si)
2832 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2833 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2834 return;
2836 FOR_EACH_EDGE (e, ei, new_bb->succs)
2838 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2839 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2840 while (is_gimple_debug (gsi_stmt (ssi)))
2842 gimple *stmt = gsi_stmt (ssi);
2843 gdebug *new_stmt;
2844 tree var;
2845 tree value;
2847 /* For the last edge move the debug stmts instead of copying
2848 them. */
2849 if (ei_one_before_end_p (ei))
2851 si = ssi;
2852 gsi_prev (&ssi);
2853 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2855 gimple_debug_bind_reset_value (stmt);
2856 gimple_set_location (stmt, UNKNOWN_LOCATION);
2858 gsi_remove (&si, false);
2859 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2860 continue;
2863 if (gimple_debug_bind_p (stmt))
2865 var = gimple_debug_bind_get_var (stmt);
2866 if (single_pred_p (e->dest))
2868 value = gimple_debug_bind_get_value (stmt);
2869 value = unshare_expr (value);
2870 new_stmt = gimple_build_debug_bind (var, value, stmt);
2872 else
2873 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2875 else if (gimple_debug_source_bind_p (stmt))
2877 var = gimple_debug_source_bind_get_var (stmt);
2878 value = gimple_debug_source_bind_get_value (stmt);
2879 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2881 else if (gimple_debug_nonbind_marker_p (stmt))
2882 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2883 else
2884 gcc_unreachable ();
2885 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2886 id->debug_stmts.safe_push (new_stmt);
2887 gsi_prev (&ssi);
2892 /* Make a copy of the sub-loops of SRC_PARENT and place them
2893 as children of DEST_PARENT. */
2895 static void
2896 copy_loops (copy_body_data *id,
2897 class loop *dest_parent, class loop *src_parent)
2899 class loop *src_loop = src_parent->inner;
2900 while (src_loop)
2902 if (!id->blocks_to_copy
2903 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2905 class loop *dest_loop = alloc_loop ();
2907 /* Assign the new loop its header and latch and associate
2908 those with the new loop. */
2909 dest_loop->header = (basic_block)src_loop->header->aux;
2910 dest_loop->header->loop_father = dest_loop;
2911 if (src_loop->latch != NULL)
2913 dest_loop->latch = (basic_block)src_loop->latch->aux;
2914 dest_loop->latch->loop_father = dest_loop;
2917 /* Copy loop meta-data. */
2918 copy_loop_info (src_loop, dest_loop);
2919 if (dest_loop->unroll)
2920 cfun->has_unroll = true;
2921 if (dest_loop->force_vectorize)
2922 cfun->has_force_vectorize_loops = true;
2923 if (id->src_cfun->last_clique != 0)
2924 dest_loop->owned_clique
2925 = remap_dependence_clique (id,
2926 src_loop->owned_clique
2927 ? src_loop->owned_clique : 1);
2929 /* Finally place it into the loop array and the loop tree. */
2930 place_new_loop (cfun, dest_loop);
2931 flow_loop_tree_node_add (dest_parent, dest_loop);
2933 if (src_loop->simduid)
2935 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2936 cfun->has_simduid_loops = true;
2939 /* Recurse. */
2940 copy_loops (id, dest_loop, src_loop);
2942 src_loop = src_loop->next;
2946 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2948 void
2949 redirect_all_calls (copy_body_data * id, basic_block bb)
2951 gimple_stmt_iterator si;
2952 gimple *last = last_stmt (bb);
2953 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2955 gimple *stmt = gsi_stmt (si);
2956 if (is_gimple_call (stmt))
2958 tree old_lhs = gimple_call_lhs (stmt);
2959 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2960 if (edge)
2962 gimple *new_stmt
2963 = cgraph_edge::redirect_call_stmt_to_callee (edge);
2964 /* If IPA-SRA transformation, run as part of edge redirection,
2965 removed the LHS because it is unused, save it to
2966 killed_new_ssa_names so that we can prune it from debug
2967 statements. */
2968 if (old_lhs
2969 && TREE_CODE (old_lhs) == SSA_NAME
2970 && !gimple_call_lhs (new_stmt))
2972 if (!id->killed_new_ssa_names)
2973 id->killed_new_ssa_names = new hash_set<tree> (16);
2974 id->killed_new_ssa_names->add (old_lhs);
2977 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2978 gimple_purge_dead_eh_edges (bb);
2984 /* Make a copy of the body of FN so that it can be inserted inline in
2985 another function. Walks FN via CFG, returns new fndecl. */
2987 static tree
2988 copy_cfg_body (copy_body_data * id,
2989 basic_block entry_block_map, basic_block exit_block_map,
2990 basic_block new_entry)
2992 tree callee_fndecl = id->src_fn;
2993 /* Original cfun for the callee, doesn't change. */
2994 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2995 struct function *cfun_to_copy;
2996 basic_block bb;
2997 tree new_fndecl = NULL;
2998 bool need_debug_cleanup = false;
2999 int last;
3000 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
3001 profile_count num = entry_block_map->count;
3003 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3005 /* Register specific tree functions. */
3006 gimple_register_cfg_hooks ();
3008 /* If we are inlining just a region of the function, make sure to connect
3009 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
3010 part of a loop, we must compute the frequency and probability of
3011 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
3012 probabilities of edges incoming from the nonduplicated region. */
3013 if (new_entry)
3015 edge e;
3016 edge_iterator ei;
3017 den = profile_count::zero ();
3019 FOR_EACH_EDGE (e, ei, new_entry->preds)
3020 if (!e->src->aux)
3021 den += e->count ();
3022 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
3025 profile_count::adjust_for_ipa_scaling (&num, &den);
3027 /* Must have a CFG here at this point. */
3028 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
3029 (DECL_STRUCT_FUNCTION (callee_fndecl)));
3032 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
3033 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3034 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3035 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3037 /* Duplicate any exception-handling regions. */
3038 if (cfun->eh)
3039 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3040 remap_decl_1, id);
3042 /* Use aux pointers to map the original blocks to their copies. */
3043 FOR_EACH_BB_FN (bb, cfun_to_copy)
3044 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3046 basic_block new_bb = copy_bb (id, bb, num, den);
3047 bb->aux = new_bb;
3048 new_bb->aux = bb;
3049 new_bb->loop_father = entry_block_map->loop_father;
3052 last = last_basic_block_for_fn (cfun);
3054 /* Now that we've duplicated the blocks, duplicate their edges. */
3055 basic_block abnormal_goto_dest = NULL;
3056 if (id->call_stmt
3057 && stmt_can_make_abnormal_goto (id->call_stmt))
3059 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3061 bb = gimple_bb (id->call_stmt);
3062 gsi_next (&gsi);
3063 if (gsi_end_p (gsi))
3064 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3066 FOR_ALL_BB_FN (bb, cfun_to_copy)
3067 if (!id->blocks_to_copy
3068 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3069 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3070 abnormal_goto_dest, id);
3072 if (id->eh_landing_pad_dest)
3074 add_clobbers_to_eh_landing_pad (id);
3075 id->eh_landing_pad_dest = NULL;
3078 if (new_entry)
3080 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3081 EDGE_FALLTHRU);
3082 e->probability = profile_probability::always ();
3085 /* Duplicate the loop tree, if available and wanted. */
3086 if (loops_for_fn (src_cfun) != NULL
3087 && current_loops != NULL)
3089 copy_loops (id, entry_block_map->loop_father,
3090 get_loop (src_cfun, 0));
3091 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
3092 loops_state_set (LOOPS_NEED_FIXUP);
3095 /* If the loop tree in the source function needed fixup, mark the
3096 destination loop tree for fixup, too. */
3097 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3098 loops_state_set (LOOPS_NEED_FIXUP);
3100 if (gimple_in_ssa_p (cfun))
3101 FOR_ALL_BB_FN (bb, cfun_to_copy)
3102 if (!id->blocks_to_copy
3103 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3104 copy_phis_for_bb (bb, id);
3106 FOR_ALL_BB_FN (bb, cfun_to_copy)
3107 if (bb->aux)
3109 if (need_debug_cleanup
3110 && bb->index != ENTRY_BLOCK
3111 && bb->index != EXIT_BLOCK)
3112 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3113 /* Update call edge destinations. This cannot be done before loop
3114 info is updated, because we may split basic blocks. */
3115 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3116 && bb->index != ENTRY_BLOCK
3117 && bb->index != EXIT_BLOCK)
3118 redirect_all_calls (id, (basic_block)bb->aux);
3119 ((basic_block)bb->aux)->aux = NULL;
3120 bb->aux = NULL;
3123 /* Zero out AUX fields of newly created block during EH edge
3124 insertion. */
3125 for (; last < last_basic_block_for_fn (cfun); last++)
3127 if (need_debug_cleanup)
3128 maybe_move_debug_stmts_to_successors (id,
3129 BASIC_BLOCK_FOR_FN (cfun, last));
3130 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3131 /* Update call edge destinations. This cannot be done before loop
3132 info is updated, because we may split basic blocks. */
3133 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3134 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3136 entry_block_map->aux = NULL;
3137 exit_block_map->aux = NULL;
3139 if (id->eh_map)
3141 delete id->eh_map;
3142 id->eh_map = NULL;
3144 if (id->dependence_map)
3146 delete id->dependence_map;
3147 id->dependence_map = NULL;
3150 return new_fndecl;
3153 /* Copy the debug STMT using ID. We deal with these statements in a
3154 special way: if any variable in their VALUE expression wasn't
3155 remapped yet, we won't remap it, because that would get decl uids
3156 out of sync, causing codegen differences between -g and -g0. If
3157 this arises, we drop the VALUE expression altogether. */
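/* (Illustrative note: remapping a decl on demand here would allocate a
   fresh DECL_UID, and decl UIDs feed into hashing and ordering decisions
   later on; doing that only when debug statements are present would make
   -g and -g0 generate different code, hence the value is dropped
   instead.)  */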
3159 static void
3160 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3162 tree t, *n;
3163 struct walk_stmt_info wi;
3165 if (tree block = gimple_block (stmt))
3167 n = id->decl_map->get (block);
3168 gimple_set_block (stmt, n ? *n : id->block);
3171 if (gimple_debug_nonbind_marker_p (stmt))
3172 return;
3174 /* Remap all the operands in COPY. */
3175 memset (&wi, 0, sizeof (wi));
3176 wi.info = id;
3178 processing_debug_stmt = 1;
3180 if (gimple_debug_source_bind_p (stmt))
3181 t = gimple_debug_source_bind_get_var (stmt);
3182 else if (gimple_debug_bind_p (stmt))
3183 t = gimple_debug_bind_get_var (stmt);
3184 else
3185 gcc_unreachable ();
3187 if (TREE_CODE (t) == PARM_DECL && id->debug_map
3188 && (n = id->debug_map->get (t)))
3190 gcc_assert (VAR_P (*n));
3191 t = *n;
3193 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3194 /* T is a non-localized variable. */;
3195 else
3196 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3198 if (gimple_debug_bind_p (stmt))
3200 gimple_debug_bind_set_var (stmt, t);
3202 if (gimple_debug_bind_has_value_p (stmt))
3203 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3204 remap_gimple_op_r, &wi, NULL);
3206 /* Punt if any decl couldn't be remapped. */
3207 if (processing_debug_stmt < 0)
3208 gimple_debug_bind_reset_value (stmt);
3210 else if (gimple_debug_source_bind_p (stmt))
3212 gimple_debug_source_bind_set_var (stmt, t);
3213 /* When inlining and the source bind refers to one of the optimized
3214 away parameters, change the source bind into a normal debug bind
3215 referring to the corresponding DEBUG_EXPR_DECL that should have
3216 been bound before the call stmt. */
3217 t = gimple_debug_source_bind_get_value (stmt);
3218 if (t != NULL_TREE
3219 && TREE_CODE (t) == PARM_DECL
3220 && id->call_stmt)
3222 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3223 unsigned int i;
3224 if (debug_args != NULL)
3226 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3227 if ((**debug_args)[i] == DECL_ORIGIN (t)
3228 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3230 t = (**debug_args)[i + 1];
3231 stmt->subcode = GIMPLE_DEBUG_BIND;
3232 gimple_debug_bind_set_value (stmt, t);
3233 break;
3237 if (gimple_debug_source_bind_p (stmt))
3238 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3239 remap_gimple_op_r, &wi, NULL);
3242 processing_debug_stmt = 0;
3244 update_stmt (stmt);
3247 /* Process deferred debug stmts. In order to give values better odds
3248 of being successfully remapped, we delay the processing of debug
3249 stmts until all other stmts that might require remapping are
3250 processed. */
3252 static void
3253 copy_debug_stmts (copy_body_data *id)
3255 size_t i;
3256 gdebug *stmt;
3258 if (!id->debug_stmts.exists ())
3259 return;
3261 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3262 copy_debug_stmt (stmt, id);
3264 id->debug_stmts.release ();
3267 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3268 another function. */
3270 static tree
3271 copy_tree_body (copy_body_data *id)
3273 tree fndecl = id->src_fn;
3274 tree body = DECL_SAVED_TREE (fndecl);
3276 walk_tree (&body, copy_tree_body_r, id, NULL);
3278 return body;
3281 /* Make a copy of the body of FN so that it can be inserted inline in
3282 another function. */
3284 static tree
3285 copy_body (copy_body_data *id,
3286 basic_block entry_block_map, basic_block exit_block_map,
3287 basic_block new_entry)
3289 tree fndecl = id->src_fn;
3290 tree body;
3292 /* If this body has a CFG, walk CFG and copy. */
3293 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3294 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3295 new_entry);
3296 copy_debug_stmts (id);
3297 delete id->killed_new_ssa_names;
3298 id->killed_new_ssa_names = NULL;
3300 return body;
3303 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3304 defined in function FN, or of a data member thereof. */
3306 static bool
3307 self_inlining_addr_expr (tree value, tree fn)
3309 tree var;
3311 if (TREE_CODE (value) != ADDR_EXPR)
3312 return false;
3314 var = get_base_address (TREE_OPERAND (value, 0));
3316 return var && auto_var_in_fn_p (var, fn);
3319 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3320 lexical block and line number information from base_stmt, if given,
3321 or from the last stmt of the block otherwise. */
3323 static gimple *
3324 insert_init_debug_bind (copy_body_data *id,
3325 basic_block bb, tree var, tree value,
3326 gimple *base_stmt)
3328 gimple *note;
3329 gimple_stmt_iterator gsi;
3330 tree tracked_var;
3332 if (!gimple_in_ssa_p (id->src_cfun))
3333 return NULL;
3335 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3336 return NULL;
3338 tracked_var = target_for_debug_bind (var);
3339 if (!tracked_var)
3340 return NULL;
3342 if (bb)
3344 gsi = gsi_last_bb (bb);
3345 if (!base_stmt && !gsi_end_p (gsi))
3346 base_stmt = gsi_stmt (gsi);
3349 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3351 if (bb)
3353 if (!gsi_end_p (gsi))
3354 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3355 else
3356 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3359 return note;
3362 static void
3363 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3365 /* If VAR represents a zero-sized variable, it's possible that the
3366 assignment statement may result in no gimple statements. */
3367 if (init_stmt)
3369 gimple_stmt_iterator si = gsi_last_bb (bb);
3371 /* We can end up with init statements that store to a non-register
3372 from a rhs with a conversion. Handle that here by forcing the
3373 rhs into a temporary. gimple_regimplify_operands is not
3374 prepared to do this for us. */
3375 if (!is_gimple_debug (init_stmt)
3376 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3377 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3378 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3380 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3381 gimple_expr_type (init_stmt),
3382 gimple_assign_rhs1 (init_stmt));
3383 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3384 GSI_NEW_STMT);
3385 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3386 gimple_assign_set_rhs1 (init_stmt, rhs);
3388 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3389 if (!is_gimple_debug (init_stmt))
3391 gimple_regimplify_operands (init_stmt, &si);
3393 tree def = gimple_assign_lhs (init_stmt);
3394 insert_init_debug_bind (id, bb, def, def, init_stmt);
3399 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3400 if need be (which should only be necessary for invalid programs). Attempt
3401 to convert VALUE to TYPE and return the result if that is possible; otherwise
3402 just return a zero constant of the given type. */
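/* A hedged sketch of the intent: for an ordinary promotion/demotion
   mismatch, say passing an "int" argument to a parameter declared "long",
   fold_convert does the conversion; for a non-register value or a
   same-sized but incompatible type a VIEW_CONVERT_EXPR is used; anything
   else degenerates to a zero constant of TYPE.  */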
3404 tree
3405 force_value_to_type (tree type, tree value)
3407 /* If we can match up types by promotion/demotion do so. */
3408 if (fold_convertible_p (type, value))
3409 return fold_convert (type, value);
3411 /* ??? For valid programs we should not end up here.
3412 Still if we end up with truly mismatched types here, fall back
3413 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3414 GIMPLE to the following passes. */
3415 if (!is_gimple_reg_type (TREE_TYPE (value))
3416 || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3417 return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3418 else
3419 return build_zero_cst (type);
3422 /* Initialize parameter P with VALUE. If needed, produce an init statement
3423 at the end of BB. When BB is NULL, we return the init statement to be
3424 output later. */
3425 static gimple *
3426 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3427 basic_block bb, tree *vars)
3429 gimple *init_stmt = NULL;
3430 tree var;
3431 tree rhs = value;
3432 tree def = (gimple_in_ssa_p (cfun)
3433 ? ssa_default_def (id->src_cfun, p) : NULL);
3435 if (value
3436 && value != error_mark_node
3437 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3438 rhs = force_value_to_type (TREE_TYPE (p), value);
3440 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3441 here since the type of this decl must be visible to the calling
3442 function. */
3443 var = copy_decl_to_var (p, id);
3445 /* Declare this new variable. */
3446 DECL_CHAIN (var) = *vars;
3447 *vars = var;
3449 /* Make gimplifier happy about this variable. */
3450 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3452 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3453 we would not need to create a new variable here at all, if it
3454 weren't for debug info. Still, we can just use the argument
3455 value. */
3456 if (TREE_READONLY (p)
3457 && !TREE_ADDRESSABLE (p)
3458 && value && !TREE_SIDE_EFFECTS (value)
3459 && !def)
3461 /* We may produce non-gimple trees by adding NOPs or introduce
3462 invalid sharing when the operand is not really constant.
3463 It is not a big deal to prohibit constant propagation here as
3464 we will constant propagate in the DOM1 pass anyway. */
3465 if (is_gimple_min_invariant (value)
3466 && useless_type_conversion_p (TREE_TYPE (p),
3467 TREE_TYPE (value))
3468 /* We have to be very careful about ADDR_EXPR. Make sure
3469 the base variable isn't a local variable of the inlined
3470 function, e.g., when doing recursive inlining, direct or
3471 mutually-recursive or whatever, which is why we don't
3472 just test whether fn == current_function_decl. */
3473 && ! self_inlining_addr_expr (value, fn))
3475 insert_decl_map (id, p, value);
3476 insert_debug_decl_map (id, p, var);
3477 return insert_init_debug_bind (id, bb, var, value, NULL);
3481 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3482 that way, when the PARM_DECL is encountered, it will be
3483 automatically replaced by the VAR_DECL. */
3484 insert_decl_map (id, p, var);
3486 /* Even if P was TREE_READONLY, the new VAR should not be.
3487 In the original code, we would have constructed a
3488 temporary, and then the function body would have never
3489 changed the value of P. However, now, we will be
3490 constructing VAR directly. The constructor body may
3491 change its value multiple times as it is being
3492 constructed. Therefore, it must not be TREE_READONLY;
3493 the back-end assumes that a TREE_READONLY variable is
3494 assigned to only once. */
3495 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3496 TREE_READONLY (var) = 0;
3498 /* If there is no setup required and we are in SSA, take the easy route
3499 replacing all SSA names representing the function parameter by the
3500 SSA name passed to the function.
3502 We need to construct a map for the variable anyway as it might be used
3503 in different SSA names when the parameter is set in the function.
3505 Do the replacement at -O0 for const arguments replaced by a constant.
3506 This is important for builtin_constant_p and other constructs requiring
3507 a constant argument to be visible in the inlined function body. */
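/* Illustrative sketch (hypothetical callee): for

     static inline int f (const int n) { return __builtin_constant_p (n); }

   inlining the call "f (5)" maps the default definition of N directly to
   the constant 5, so the builtin can later fold to 1 even at -O0.  */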
3508 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3509 && (optimize
3510 || (TREE_READONLY (p)
3511 && is_gimple_min_invariant (rhs)))
3512 && (TREE_CODE (rhs) == SSA_NAME
3513 || is_gimple_min_invariant (rhs))
3514 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3516 insert_decl_map (id, def, rhs);
3517 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3520 /* If the value of the argument is never used, don't bother initializing
3521 it. */
3522 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3524 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3525 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3528 /* Initialize this VAR_DECL from the equivalent argument. Convert
3529 the argument to the proper type in case it was promoted. */
3530 if (value)
3532 if (rhs == error_mark_node)
3534 insert_decl_map (id, p, var);
3535 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3538 STRIP_USELESS_TYPE_CONVERSION (rhs);
3540 /* If we are in SSA form, properly remap the default definition
3541 or assign to a dummy SSA name if the parameter is unused and
3542 we are not optimizing. */
3543 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3545 if (def)
3547 def = remap_ssa_name (def, id);
3548 init_stmt = gimple_build_assign (def, rhs);
3549 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3550 set_ssa_default_def (cfun, var, NULL);
3552 else if (!optimize)
3554 def = make_ssa_name (var);
3555 init_stmt = gimple_build_assign (def, rhs);
3558 else
3559 init_stmt = gimple_build_assign (var, rhs);
3561 if (bb && init_stmt)
3562 insert_init_stmt (id, bb, init_stmt);
3564 return init_stmt;
3567 /* Generate code to initialize the parameters of the function at the
3568 top of the stack in ID from the GIMPLE_CALL STMT. */
3570 static void
3571 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3572 tree fn, basic_block bb)
3574 tree parms;
3575 size_t i;
3576 tree p;
3577 tree vars = NULL_TREE;
3578 tree static_chain = gimple_call_chain (stmt);
3580 /* Figure out what the parameters are. */
3581 parms = DECL_ARGUMENTS (fn);
3583 /* Loop through the parameter declarations, replacing each with an
3584 equivalent VAR_DECL, appropriately initialized. */
3585 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3587 tree val;
3588 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3589 setup_one_parameter (id, p, val, fn, bb, &vars);
3591 /* After remapping parameters remap their types. This has to be done
3592 in a second loop over all parameters to appropriately remap
3593 variable sized arrays when the size is specified in a
3594 parameter following the array. */
3595 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3597 tree *varp = id->decl_map->get (p);
3598 if (varp && VAR_P (*varp))
3600 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3601 ? ssa_default_def (id->src_cfun, p) : NULL);
3602 tree var = *varp;
3603 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3604 /* Also remap the default definition if it was remapped
3605 to the default definition of the parameter replacement
3606 by the parameter setup. */
3607 if (def)
3609 tree *defp = id->decl_map->get (def);
3610 if (defp
3611 && TREE_CODE (*defp) == SSA_NAME
3612 && SSA_NAME_VAR (*defp) == var)
3613 TREE_TYPE (*defp) = TREE_TYPE (var);
3618 /* Initialize the static chain. */
3619 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3620 gcc_assert (fn != current_function_decl);
3621 if (p)
3623 /* No static chain? Seems like a bug in tree-nested.c. */
3624 gcc_assert (static_chain);
3626 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3629 declare_inline_vars (id->block, vars);
3633 /* Declare a return variable to replace the RESULT_DECL for the
3634 function we are calling. An appropriate DECL_STMT is returned.
3635 The USE_STMT is filled to contain a use of the declaration to
3636 indicate the return value of the function.
3638 RETURN_SLOT, if non-null, is the place where to store the result. It
3639 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3640 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3642 The return value is a (possibly null) value that holds the result
3643 as seen by the caller. */
3645 static tree
3646 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3647 basic_block entry_bb)
3649 tree callee = id->src_fn;
3650 tree result = DECL_RESULT (callee);
3651 tree callee_type = TREE_TYPE (result);
3652 tree caller_type;
3653 tree var, use;
3655 /* Handle type-mismatches in the function declaration return type
3656 vs. the call expression. */
3657 if (modify_dest)
3658 caller_type = TREE_TYPE (modify_dest);
3659 else if (return_slot)
3660 caller_type = TREE_TYPE (return_slot);
3661 else /* No LHS on the call. */
3662 caller_type = TREE_TYPE (TREE_TYPE (callee));
3664 /* We don't need to do anything for functions that don't return anything. */
3665 if (VOID_TYPE_P (callee_type))
3666 return NULL_TREE;
3668 /* If there was a return slot, then the return value is the
3669 dereferenced address of that object. */
3670 if (return_slot)
3672 /* The front end shouldn't have used both return_slot and
3673 a modify expression. */
3674 gcc_assert (!modify_dest);
3675 if (DECL_BY_REFERENCE (result))
3677 tree return_slot_addr = build_fold_addr_expr (return_slot);
3678 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3680 /* We are going to construct *&return_slot and we can't do that
3681 for variables that are believed not to be addressable.
3683 FIXME: This check can possibly match, because values returned
3684 via the return slot optimization are not believed to have their
3685 address taken by alias analysis. */
3686 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3687 var = return_slot_addr;
3688 mark_addressable (return_slot);
3690 else
3692 var = return_slot;
3693 gcc_assert (TREE_CODE (var) != SSA_NAME);
3694 if (TREE_ADDRESSABLE (result))
3695 mark_addressable (var);
3697 if (DECL_NOT_GIMPLE_REG_P (result)
3698 && DECL_P (var))
3699 DECL_NOT_GIMPLE_REG_P (var) = 1;
3701 if (!useless_type_conversion_p (callee_type, caller_type))
3702 var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3704 use = NULL;
3705 goto done;
3708 /* All types requiring non-trivial constructors should have been handled. */
3709 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3711 /* Attempt to avoid creating a new temporary variable. */
3712 if (modify_dest
3713 && TREE_CODE (modify_dest) != SSA_NAME)
3715 bool use_it = false;
3717 /* We can't use MODIFY_DEST if there's type promotion involved. */
3718 if (!useless_type_conversion_p (callee_type, caller_type))
3719 use_it = false;
3721 /* ??? If we're assigning to a variable sized type, then we must
3722 reuse the destination variable, because we've no good way to
3723 create variable sized temporaries at this point. */
3724 else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3725 use_it = true;
3727 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3728 reuse it as the result of the call directly. Don't do this if
3729 it would promote MODIFY_DEST to addressable. */
3730 else if (TREE_ADDRESSABLE (result))
3731 use_it = false;
3732 else
3734 tree base_m = get_base_address (modify_dest);
3736 /* If the base isn't a decl, then it's a pointer, and we don't
3737 know where that's going to go. */
3738 if (!DECL_P (base_m))
3739 use_it = false;
3740 else if (is_global_var (base_m))
3741 use_it = false;
3742 else if (DECL_NOT_GIMPLE_REG_P (result)
3743 && !DECL_NOT_GIMPLE_REG_P (base_m))
3744 use_it = false;
3745 else if (!TREE_ADDRESSABLE (base_m))
3746 use_it = true;
3749 if (use_it)
3751 var = modify_dest;
3752 use = NULL;
3753 goto done;
3757 gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3759 var = copy_result_decl_to_var (result, id);
3760 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3762 /* Do not have the rest of GCC warn about this variable as it should
3763 not be visible to the user. */
3764 TREE_NO_WARNING (var) = 1;
3766 declare_inline_vars (id->block, var);
3768 /* Build the use expr. If the return type of the function was
3769 promoted, convert it back to the expected type. */
3770 use = var;
3771 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3773 /* If we can match up types by promotion/demotion do so. */
3774 if (fold_convertible_p (caller_type, var))
3775 use = fold_convert (caller_type, var);
3776 else
3778 /* ??? For valid programs we should not end up here.
3779 Still if we end up with truly mismatched types here, fall back
3780 to using a MEM_REF to not leak invalid GIMPLE to the following
3781 passes. */
3782 /* Prevent var from being written into SSA form. */
3783 if (is_gimple_reg_type (TREE_TYPE (var)))
3784 DECL_NOT_GIMPLE_REG_P (var) = true;
3785 use = fold_build2 (MEM_REF, caller_type,
3786 build_fold_addr_expr (var),
3787 build_int_cst (ptr_type_node, 0));
3791 STRIP_USELESS_TYPE_CONVERSION (use);
3793 if (DECL_BY_REFERENCE (result))
3795 TREE_ADDRESSABLE (var) = 1;
3796 var = build_fold_addr_expr (var);
3799 done:
3800 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3801 way, when the RESULT_DECL is encountered, it will be
3802 automatically replaced by the VAR_DECL.
3804 When returning by reference, ensure that RESULT_DECL remaps to
3805 gimple_val. */
3806 if (DECL_BY_REFERENCE (result)
3807 && !is_gimple_val (var))
3809 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3810 insert_decl_map (id, result, temp);
3811 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3812 its default_def SSA_NAME. */
3813 if (gimple_in_ssa_p (id->src_cfun)
3814 && is_gimple_reg (result))
3816 temp = make_ssa_name (temp);
3817 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3819 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3821 else
3822 insert_decl_map (id, result, var);
3824 /* Remember this so we can ignore it in remap_decls. */
3825 id->retvar = var;
3826 return use;
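/* Editor's note: hypothetical sketch, not part of the original file.  It
   illustrates the effect of declare_return_variable on a caller statement
   "d = foo (x);" when MODIFY_DEST ("d") cannot be reused directly; the name
   "retval_N" stands for the temporary created from the RESULT_DECL.  */
#if 0
  d = foo (x);                  /* caller statement before inlining */
  /* After inlining (sketch): the RETURN_EXPRs of foo become stores to the
     temporary, and expand_call_inline later builds the final copy.  */
  retval_N = /* ... value computed by the inlined body ... */;
  d = retval_N;
  /* When "d" is safe to reuse (the use_it case above), retval_N is elided
     and the inlined body assigns to "d" itself.  */
#endif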
3829 /* Determine if the function can be copied. If so return NULL. If
3830 not return a string describing the reason for failure. */
3832 const char *
3833 copy_forbidden (struct function *fun)
3835 const char *reason = fun->cannot_be_copied_reason;
3837 /* Only examine the function once. */
3838 if (fun->cannot_be_copied_set)
3839 return reason;
3841 /* We cannot copy a function that receives a non-local goto
3842 because we cannot remap the destination label used in the
3843 function that is performing the non-local goto. */
3844 /* ??? Actually, this should be possible, if we work at it.
3845 No doubt there's just a handful of places that simply
3846 assume it doesn't happen and don't substitute properly. */
3847 if (fun->has_nonlocal_label)
3849 reason = G_("function %q+F can never be copied "
3850 "because it receives a non-local goto");
3851 goto fail;
3854 if (fun->has_forced_label_in_static)
3856 reason = G_("function %q+F can never be copied because it saves "
3857 "address of local label in a static variable");
3858 goto fail;
3861 fail:
3862 fun->cannot_be_copied_reason = reason;
3863 fun->cannot_be_copied_set = true;
3864 return reason;
3868 static const char *inline_forbidden_reason;
3870 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3871 iff a function cannot be inlined. Also sets the reason why. */
3873 static tree
3874 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3875 struct walk_stmt_info *wip)
3877 tree fn = (tree) wip->info;
3878 tree t;
3879 gimple *stmt = gsi_stmt (*gsi);
3881 switch (gimple_code (stmt))
3883 case GIMPLE_CALL:
3884 /* Refuse to inline an alloca call unless the user explicitly forced it,
3885 as this may change the program's memory overhead drastically when the
3886 function using alloca is called in a loop. In the copy of GCC included
3887 in SPEC2000, inlining into schedule_block caused it to require 2GB of
3888 RAM instead of 256MB. Don't do so for alloca calls emitted for
3889 VLA objects, as those can't cause unbounded growth (they're always
3890 wrapped inside stack_save/stack_restore regions). */
3891 if (gimple_maybe_alloca_call_p (stmt)
3892 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3893 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3895 inline_forbidden_reason
3896 = G_("function %q+F can never be inlined because it uses "
3897 "alloca (override using the always_inline attribute)");
3898 *handled_ops_p = true;
3899 return fn;
3902 t = gimple_call_fndecl (stmt);
3903 if (t == NULL_TREE)
3904 break;
3906 /* We cannot inline functions that call setjmp. */
3907 if (setjmp_call_p (t))
3909 inline_forbidden_reason
3910 = G_("function %q+F can never be inlined because it uses setjmp");
3911 *handled_ops_p = true;
3912 return t;
3915 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3916 switch (DECL_FUNCTION_CODE (t))
3918 /* We cannot inline functions that take a variable number of
3919 arguments. */
3920 case BUILT_IN_VA_START:
3921 case BUILT_IN_NEXT_ARG:
3922 case BUILT_IN_VA_END:
3923 inline_forbidden_reason
3924 = G_("function %q+F can never be inlined because it "
3925 "uses variable argument lists");
3926 *handled_ops_p = true;
3927 return t;
3929 case BUILT_IN_LONGJMP:
3930 /* We can't inline functions that call __builtin_longjmp at
3931 all. The non-local goto machinery really requires the
3932 destination be in a different function. If we allow the
3933 function calling __builtin_longjmp to be inlined into the
3934 function calling __builtin_setjmp, Things will Go Awry. */
3935 inline_forbidden_reason
3936 = G_("function %q+F can never be inlined because "
3937 "it uses setjmp-longjmp exception handling");
3938 *handled_ops_p = true;
3939 return t;
3941 case BUILT_IN_NONLOCAL_GOTO:
3942 /* Similarly. */
3943 inline_forbidden_reason
3944 = G_("function %q+F can never be inlined because "
3945 "it uses non-local goto");
3946 *handled_ops_p = true;
3947 return t;
3949 case BUILT_IN_RETURN:
3950 case BUILT_IN_APPLY_ARGS:
3951 /* If a __builtin_apply_args caller would be inlined,
3952 it would be saving arguments of the function it has
3953 been inlined into. Similarly, __builtin_return would
3954 return from the function it has been inlined into. */
3955 inline_forbidden_reason
3956 = G_("function %q+F can never be inlined because "
3957 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3958 *handled_ops_p = true;
3959 return t;
3961 default:
3962 break;
3964 break;
3966 case GIMPLE_GOTO:
3967 t = gimple_goto_dest (stmt);
3969 /* We will not inline a function which uses computed goto. The
3970 addresses of its local labels, which may be tucked into
3971 global storage, are of course not constant across
3972 instantiations, which causes unexpected behavior. */
3973 if (TREE_CODE (t) != LABEL_DECL)
3975 inline_forbidden_reason
3976 = G_("function %q+F can never be inlined "
3977 "because it contains a computed goto");
3978 *handled_ops_p = true;
3979 return t;
3981 break;
3983 default:
3984 break;
3987 *handled_ops_p = false;
3988 return NULL_TREE;
3991 /* Return true if FNDECL is a function that cannot be inlined into
3992 another one. */
3994 static bool
3995 inline_forbidden_p (tree fndecl)
3997 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3998 struct walk_stmt_info wi;
3999 basic_block bb;
4000 bool forbidden_p = false;
4002 /* First check for shared reasons not to copy the code. */
4003 inline_forbidden_reason = copy_forbidden (fun);
4004 if (inline_forbidden_reason != NULL)
4005 return true;
4007 /* Next, walk the statements of the function looking for
4008 constructs we can't handle, or that are non-optimal for inlining. */
4009 hash_set<tree> visited_nodes;
4010 memset (&wi, 0, sizeof (wi));
4011 wi.info = (void *) fndecl;
4012 wi.pset = &visited_nodes;
4014 FOR_EACH_BB_FN (bb, fun)
4016 gimple *ret;
4017 gimple_seq seq = bb_seq (bb);
4018 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
4019 forbidden_p = (ret != NULL);
4020 if (forbidden_p)
4021 break;
4024 return forbidden_p;
4027 /* Return false if the function FNDECL cannot be inlined on account of its
4028 attributes, true otherwise. */
4029 static bool
4030 function_attribute_inlinable_p (const_tree fndecl)
4032 if (targetm.attribute_table)
4034 const_tree a;
4036 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
4038 const_tree name = get_attribute_name (a);
4039 int i;
4041 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
4042 if (is_attribute_p (targetm.attribute_table[i].name, name))
4043 return targetm.function_attribute_inlinable_p (fndecl);
4047 return true;
4050 /* Returns nonzero if FN is a function that does not have any
4051 fundamental inline blocking properties. */
4053 bool
4054 tree_inlinable_function_p (tree fn)
4056 bool inlinable = true;
4057 bool do_warning;
4058 tree always_inline;
4060 /* If we've already decided this function shouldn't be inlined,
4061 there's no need to check again. */
4062 if (DECL_UNINLINABLE (fn))
4063 return false;
4065 /* We only warn for functions declared `inline' by the user. */
4066 do_warning = (opt_for_fn (fn, warn_inline)
4067 && DECL_DECLARED_INLINE_P (fn)
4068 && !DECL_NO_INLINE_WARNING_P (fn)
4069 && !DECL_IN_SYSTEM_HEADER (fn));
4071 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4073 if (flag_no_inline
4074 && always_inline == NULL)
4076 if (do_warning)
4077 warning (OPT_Winline, "function %q+F can never be inlined because it "
4078 "is suppressed using %<-fno-inline%>", fn);
4079 inlinable = false;
4082 else if (!function_attribute_inlinable_p (fn))
4084 if (do_warning)
4085 warning (OPT_Winline, "function %q+F can never be inlined because it "
4086 "uses attributes conflicting with inlining", fn);
4087 inlinable = false;
4090 else if (inline_forbidden_p (fn))
4092 /* See if we should warn about uninlinable functions. Previously,
4093 some of these warnings would be issued while trying to expand
4094 the function inline, but that would cause multiple warnings
4095 about functions that would, for example, call alloca. But since
4096 this is a property of the function, just one warning is enough.
4097 As a bonus we can now give more details about the reason why a
4098 function is not inlinable. */
4099 if (always_inline)
4100 error (inline_forbidden_reason, fn);
4101 else if (do_warning)
4102 warning (OPT_Winline, inline_forbidden_reason, fn);
4104 inlinable = false;
4107 /* Squirrel away the result so that we don't have to check again. */
4108 DECL_UNINLINABLE (fn) = !inlinable;
4110 return inlinable;
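/* Editor's note: hypothetical examples, not part of the original file.
   They show the kind of constructs that make inline_forbidden_p_stmt above
   refuse a function; all identifiers below are made up.  */
#if 0
static void
grab_scratch (unsigned long n)
{
  /* Plain alloca call (not for a VLA): refused unless always_inline.  */
  char *p = (char *) __builtin_alloca (n);
  __builtin_memset (p, 0, n);
}

static void
dispatch (int i)
{
  /* Computed goto: refused by the GIMPLE_GOTO case of the walker.  */
  static void *targets[] = { &&handle_a, &&handle_b };
  goto *targets[i & 1];
 handle_a: return;
 handle_b: return;
}
#endif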
4113 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
4114 word size, take a possible memcpy call into account, and return the
4115 cost based on whether we are optimizing for size or speed, per SPEED_P. */
4118 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4120 HOST_WIDE_INT size;
4122 gcc_assert (!VOID_TYPE_P (type));
4124 if (TREE_CODE (type) == VECTOR_TYPE)
4126 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4127 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4128 int orig_mode_size
4129 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4130 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4131 return ((orig_mode_size + simd_mode_size - 1)
4132 / simd_mode_size);
4135 size = int_size_in_bytes (type);
4137 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4138 /* Cost of a memcpy call, 3 arguments and the call. */
4139 return 4;
4140 else
4141 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
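/* Editor's note: worked example, not part of the original file.  Assuming a
   target where MOVE_MAX_PIECES is 8 and MOVE_RATIO (false) is 6 (both are
   hypothetical, target-dependent values), the non-vector branch above gives:
     40-byte struct:  40 <= 8 * 6, so cost = (40 + 8 - 1) / 8 = 5 moves;
     64-byte struct:  64 >  8 * 6, so cost = 4 (modelled as a memcpy call).
   SMALL_TYPE and BIG_TYPE below are placeholders for the RECORD_TYPE trees.  */
#if 0
  int small_cost = estimate_move_cost (small_type, /*speed_p=*/false);  /* 5 */
  int big_cost = estimate_move_cost (big_type, /*speed_p=*/false);      /* 4 */
#endif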
4144 /* Returns the cost of operation CODE, according to WEIGHTS. */
4146 static int
4147 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4148 tree op1 ATTRIBUTE_UNUSED, tree op2)
4150 switch (code)
4152 /* These are "free" conversions, or their presumed cost
4153 is folded into other operations. */
4154 case RANGE_EXPR:
4155 CASE_CONVERT:
4156 case COMPLEX_EXPR:
4157 case PAREN_EXPR:
4158 case VIEW_CONVERT_EXPR:
4159 return 0;
4161 /* Assign cost of 1 to usual operations.
4162 ??? We may consider mapping RTL costs to this. */
4163 case COND_EXPR:
4164 case VEC_COND_EXPR:
4165 case VEC_PERM_EXPR:
4167 case PLUS_EXPR:
4168 case POINTER_PLUS_EXPR:
4169 case POINTER_DIFF_EXPR:
4170 case MINUS_EXPR:
4171 case MULT_EXPR:
4172 case MULT_HIGHPART_EXPR:
4174 case ADDR_SPACE_CONVERT_EXPR:
4175 case FIXED_CONVERT_EXPR:
4176 case FIX_TRUNC_EXPR:
4178 case NEGATE_EXPR:
4179 case FLOAT_EXPR:
4180 case MIN_EXPR:
4181 case MAX_EXPR:
4182 case ABS_EXPR:
4183 case ABSU_EXPR:
4185 case LSHIFT_EXPR:
4186 case RSHIFT_EXPR:
4187 case LROTATE_EXPR:
4188 case RROTATE_EXPR:
4190 case BIT_IOR_EXPR:
4191 case BIT_XOR_EXPR:
4192 case BIT_AND_EXPR:
4193 case BIT_NOT_EXPR:
4195 case TRUTH_ANDIF_EXPR:
4196 case TRUTH_ORIF_EXPR:
4197 case TRUTH_AND_EXPR:
4198 case TRUTH_OR_EXPR:
4199 case TRUTH_XOR_EXPR:
4200 case TRUTH_NOT_EXPR:
4202 case LT_EXPR:
4203 case LE_EXPR:
4204 case GT_EXPR:
4205 case GE_EXPR:
4206 case EQ_EXPR:
4207 case NE_EXPR:
4208 case ORDERED_EXPR:
4209 case UNORDERED_EXPR:
4211 case UNLT_EXPR:
4212 case UNLE_EXPR:
4213 case UNGT_EXPR:
4214 case UNGE_EXPR:
4215 case UNEQ_EXPR:
4216 case LTGT_EXPR:
4218 case CONJ_EXPR:
4220 case PREDECREMENT_EXPR:
4221 case PREINCREMENT_EXPR:
4222 case POSTDECREMENT_EXPR:
4223 case POSTINCREMENT_EXPR:
4225 case REALIGN_LOAD_EXPR:
4227 case WIDEN_SUM_EXPR:
4228 case WIDEN_MULT_EXPR:
4229 case DOT_PROD_EXPR:
4230 case SAD_EXPR:
4231 case WIDEN_MULT_PLUS_EXPR:
4232 case WIDEN_MULT_MINUS_EXPR:
4233 case WIDEN_LSHIFT_EXPR:
4235 case VEC_WIDEN_MULT_HI_EXPR:
4236 case VEC_WIDEN_MULT_LO_EXPR:
4237 case VEC_WIDEN_MULT_EVEN_EXPR:
4238 case VEC_WIDEN_MULT_ODD_EXPR:
4239 case VEC_UNPACK_HI_EXPR:
4240 case VEC_UNPACK_LO_EXPR:
4241 case VEC_UNPACK_FLOAT_HI_EXPR:
4242 case VEC_UNPACK_FLOAT_LO_EXPR:
4243 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4244 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4245 case VEC_PACK_TRUNC_EXPR:
4246 case VEC_PACK_SAT_EXPR:
4247 case VEC_PACK_FIX_TRUNC_EXPR:
4248 case VEC_PACK_FLOAT_EXPR:
4249 case VEC_WIDEN_LSHIFT_HI_EXPR:
4250 case VEC_WIDEN_LSHIFT_LO_EXPR:
4251 case VEC_DUPLICATE_EXPR:
4252 case VEC_SERIES_EXPR:
4254 return 1;
4256 /* A few special cases of expensive operations. This is useful
4257 to avoid inlining functions that have too many of these. */
4258 case TRUNC_DIV_EXPR:
4259 case CEIL_DIV_EXPR:
4260 case FLOOR_DIV_EXPR:
4261 case ROUND_DIV_EXPR:
4262 case EXACT_DIV_EXPR:
4263 case TRUNC_MOD_EXPR:
4264 case CEIL_MOD_EXPR:
4265 case FLOOR_MOD_EXPR:
4266 case ROUND_MOD_EXPR:
4267 case RDIV_EXPR:
4268 if (TREE_CODE (op2) != INTEGER_CST)
4269 return weights->div_mod_cost;
4270 return 1;
4272 /* Bit-field insertion needs several shift and mask operations. */
4273 case BIT_INSERT_EXPR:
4274 return 3;
4276 default:
4277 /* We expect a copy assignment with no operator. */
4278 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4279 return 0;
4284 /* Estimate number of instructions that will be created by expanding
4285 the statements in the statement sequence STMTS.
4286 WEIGHTS contains weights attributed to various constructs. */
4289 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4291 int cost;
4292 gimple_stmt_iterator gsi;
4294 cost = 0;
4295 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4296 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4298 return cost;
4302 /* Estimate number of instructions that will be created by expanding STMT.
4303 WEIGHTS contains weights attributed to various constructs. */
4306 estimate_num_insns (gimple *stmt, eni_weights *weights)
4308 unsigned cost, i;
4309 enum gimple_code code = gimple_code (stmt);
4310 tree lhs;
4311 tree rhs;
4313 switch (code)
4315 case GIMPLE_ASSIGN:
4316 /* Try to estimate the cost of assignments. We have two cases to
4317 deal with:
4318 1) Simple assignments to registers;
4319 2) Stores to things that must live in memory. This includes
4320 "normal" stores to scalars, but also assignments of large
4321 structures, or constructors of big arrays;
4323 Let us look at both cases, assuming we have "a = b + C":
4324 <GIMPLE_ASSIGN <var_decl "a">
4325 <plus_expr <var_decl "b"> <constant C>>
4326 If "a" is a GIMPLE register, the assignment to it is free on almost
4327 any target, because "a" usually ends up in a real register. Hence
4328 the only cost of this expression comes from the PLUS_EXPR, and we
4329 can ignore the GIMPLE_ASSIGN.
4330 If "a" is not a GIMPLE register, the assignment to "a" will most
4331 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4332 of moving something into "a", which we compute using the function
4333 estimate_move_cost. */
4334 if (gimple_clobber_p (stmt))
4335 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4337 lhs = gimple_assign_lhs (stmt);
4338 rhs = gimple_assign_rhs1 (stmt);
4340 cost = 0;
4342 /* Account for the cost of moving to / from memory. */
4343 if (gimple_store_p (stmt))
4344 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4345 if (gimple_assign_load_p (stmt))
4346 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4348 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4349 gimple_assign_rhs1 (stmt),
4350 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4351 == GIMPLE_BINARY_RHS
4352 ? gimple_assign_rhs2 (stmt) : NULL);
4353 break;
4355 case GIMPLE_COND:
4356 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4357 gimple_op (stmt, 0),
4358 gimple_op (stmt, 1));
4359 break;
4361 case GIMPLE_SWITCH:
4363 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4364 /* Take into account the cost of the switch + guess 2 conditional jumps for
4365 each case label.
4367 TODO: once the switch expansion logic is sufficiently separated, we can
4368 do a better job of estimating the cost of the switch. */
4369 if (weights->time_based)
4370 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4371 else
4372 cost = gimple_switch_num_labels (switch_stmt) * 2;
4374 break;
4376 case GIMPLE_CALL:
4378 tree decl;
4380 if (gimple_call_internal_p (stmt))
4381 return 0;
4382 else if ((decl = gimple_call_fndecl (stmt))
4383 && fndecl_built_in_p (decl))
4385 /* Do not special-case builtins where we see the body.
4386 This just confuses the inliner. */
4387 struct cgraph_node *node;
4388 if (!(node = cgraph_node::get (decl))
4389 || node->definition)
4391 /* For builtins that are likely expanded to nothing or
4392 inlined, do not account for operand costs. */
4393 else if (is_simple_builtin (decl))
4394 return 0;
4395 else if (is_inexpensive_builtin (decl))
4396 return weights->target_builtin_call_cost;
4397 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4399 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4400 specialize the cheap expansion we do here.
4401 ??? This asks for a more general solution. */
4402 switch (DECL_FUNCTION_CODE (decl))
4404 case BUILT_IN_POW:
4405 case BUILT_IN_POWF:
4406 case BUILT_IN_POWL:
4407 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4408 && (real_equal
4409 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4410 &dconst2)))
4411 return estimate_operator_cost
4412 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4413 gimple_call_arg (stmt, 0));
4414 break;
4416 default:
4417 break;
4422 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4423 if (gimple_call_lhs (stmt))
4424 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4425 weights->time_based);
4426 for (i = 0; i < gimple_call_num_args (stmt); i++)
4428 tree arg = gimple_call_arg (stmt, i);
4429 cost += estimate_move_cost (TREE_TYPE (arg),
4430 weights->time_based);
4432 break;
4435 case GIMPLE_RETURN:
4436 return weights->return_cost;
4438 case GIMPLE_GOTO:
4439 case GIMPLE_LABEL:
4440 case GIMPLE_NOP:
4441 case GIMPLE_PHI:
4442 case GIMPLE_PREDICT:
4443 case GIMPLE_DEBUG:
4444 return 0;
4446 case GIMPLE_ASM:
4448 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4449 /* 1000 means infinity. This avoids overflows later
4450 with very long asm statements. */
4451 if (count > 1000)
4452 count = 1000;
4453 /* If this asm is asm inline, count anything as minimum size. */
4454 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4455 count = MIN (1, count);
4456 return MAX (1, count);
4459 case GIMPLE_RESX:
4460 /* This is either going to be an external function call with one
4461 argument, or two register copy statements plus a goto. */
4462 return 2;
4464 case GIMPLE_EH_DISPATCH:
4465 /* ??? This is going to turn into a switch statement. Ideally
4466 we'd have a look at the eh region and estimate the number of
4467 edges involved. */
4468 return 10;
4470 case GIMPLE_BIND:
4471 return estimate_num_insns_seq (
4472 gimple_bind_body (as_a <gbind *> (stmt)),
4473 weights);
4475 case GIMPLE_EH_FILTER:
4476 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4478 case GIMPLE_CATCH:
4479 return estimate_num_insns_seq (gimple_catch_handler (
4480 as_a <gcatch *> (stmt)),
4481 weights);
4483 case GIMPLE_TRY:
4484 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4485 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4487 /* OMP directives are generally very expensive. */
4489 case GIMPLE_OMP_RETURN:
4490 case GIMPLE_OMP_SECTIONS_SWITCH:
4491 case GIMPLE_OMP_ATOMIC_STORE:
4492 case GIMPLE_OMP_CONTINUE:
4493 /* ...except these, which are cheap. */
4494 return 0;
4496 case GIMPLE_OMP_ATOMIC_LOAD:
4497 return weights->omp_cost;
4499 case GIMPLE_OMP_FOR:
4500 return (weights->omp_cost
4501 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4502 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4504 case GIMPLE_OMP_PARALLEL:
4505 case GIMPLE_OMP_TASK:
4506 case GIMPLE_OMP_CRITICAL:
4507 case GIMPLE_OMP_MASTER:
4508 case GIMPLE_OMP_TASKGROUP:
4509 case GIMPLE_OMP_ORDERED:
4510 case GIMPLE_OMP_SCAN:
4511 case GIMPLE_OMP_SECTION:
4512 case GIMPLE_OMP_SECTIONS:
4513 case GIMPLE_OMP_SINGLE:
4514 case GIMPLE_OMP_TARGET:
4515 case GIMPLE_OMP_TEAMS:
4516 return (weights->omp_cost
4517 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4519 case GIMPLE_TRANSACTION:
4520 return (weights->tm_cost
4521 + estimate_num_insns_seq (gimple_transaction_body (
4522 as_a <gtransaction *> (stmt)),
4523 weights));
4525 default:
4526 gcc_unreachable ();
4529 return cost;
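/* Editor's note: worked example, not part of the original file.  For a
   GIMPLE_SWITCH with 16 labels (as counted by gimple_switch_num_labels,
   which includes the default), the estimate above is 16 * 2 = 32 when sizing
   and floor_log2 (16) * 2 = 8 when timing, since the time-based estimate
   assumes a balanced decision tree.  SWITCH_STMT below is a placeholder.  */
#if 0
  int size_est = estimate_num_insns (switch_stmt, &eni_size_weights);  /* 32 */
  int time_est = estimate_num_insns (switch_stmt, &eni_time_weights);  /*  8 */
#endif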
4532 /* Estimate number of instructions that will be created by expanding
4533 function FNDECL. WEIGHTS contains weights attributed to various
4534 constructs. */
4537 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4539 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4540 gimple_stmt_iterator bsi;
4541 basic_block bb;
4542 int n = 0;
4544 gcc_assert (my_function && my_function->cfg);
4545 FOR_EACH_BB_FN (bb, my_function)
4547 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4548 n += estimate_num_insns (gsi_stmt (bsi), weights);
4551 return n;
4555 /* Initializes weights used by estimate_num_insns. */
4557 void
4558 init_inline_once (void)
4560 eni_size_weights.call_cost = 1;
4561 eni_size_weights.indirect_call_cost = 3;
4562 eni_size_weights.target_builtin_call_cost = 1;
4563 eni_size_weights.div_mod_cost = 1;
4564 eni_size_weights.omp_cost = 40;
4565 eni_size_weights.tm_cost = 10;
4566 eni_size_weights.time_based = false;
4567 eni_size_weights.return_cost = 1;
4569 /* Estimating the time for a call is difficult, since we have no idea what the
4570 called function does. In the current uses of eni_time_weights,
4571 underestimating the cost does less harm than overestimating it, so
4572 we choose a rather small value here. */
4573 eni_time_weights.call_cost = 10;
4574 eni_time_weights.indirect_call_cost = 15;
4575 eni_time_weights.target_builtin_call_cost = 1;
4576 eni_time_weights.div_mod_cost = 10;
4577 eni_time_weights.omp_cost = 40;
4578 eni_time_weights.tm_cost = 40;
4579 eni_time_weights.time_based = true;
4580 eni_time_weights.return_cost = 2;
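/* Editor's note: illustrative only, not part of the original file.  The two
   weight sets above are what give "size" and "time" estimates their
   different flavour; e.g. a direct call is charged 1 size unit but 10 time
   units, matching the comment about preferring to underestimate call time.  */
#if 0
  int call_size_units = eni_size_weights.call_cost;   /* 1  */
  int call_time_units = eni_time_weights.call_cost;   /* 10 */
#endif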
4584 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4586 static void
4587 prepend_lexical_block (tree current_block, tree new_block)
4589 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4590 BLOCK_SUBBLOCKS (current_block) = new_block;
4591 BLOCK_SUPERCONTEXT (new_block) = current_block;
4594 /* Add local variables from CALLEE to CALLER. */
4596 static inline void
4597 add_local_variables (struct function *callee, struct function *caller,
4598 copy_body_data *id)
4600 tree var;
4601 unsigned ix;
4603 FOR_EACH_LOCAL_DECL (callee, ix, var)
4604 if (!can_be_nonlocal (var, id))
4606 tree new_var = remap_decl (var, id);
4608 /* Remap debug-expressions. */
4609 if (VAR_P (new_var)
4610 && DECL_HAS_DEBUG_EXPR_P (var)
4611 && new_var != var)
4613 tree tem = DECL_DEBUG_EXPR (var);
4614 bool old_regimplify = id->regimplify;
4615 id->remapping_type_depth++;
4616 walk_tree (&tem, copy_tree_body_r, id, NULL);
4617 id->remapping_type_depth--;
4618 id->regimplify = old_regimplify;
4619 SET_DECL_DEBUG_EXPR (new_var, tem);
4620 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4622 add_local_decl (caller, new_var);
4626 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4627 have brought in or introduced any debug stmts for SRCVAR. */
4629 static inline void
4630 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4632 tree *remappedvarp = id->decl_map->get (srcvar);
4634 if (!remappedvarp)
4635 return;
4637 if (!VAR_P (*remappedvarp))
4638 return;
4640 if (*remappedvarp == id->retvar)
4641 return;
4643 tree tvar = target_for_debug_bind (*remappedvarp);
4644 if (!tvar)
4645 return;
4647 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4648 id->call_stmt);
4649 gimple_seq_add_stmt (bindings, stmt);
4652 /* For each inlined variable for which we may have debug bind stmts,
4653 add before GSI a final debug stmt resetting it, marking the end of
4654 its life, so that var-tracking knows it doesn't have to compute
4655 further locations for it. */
4657 static inline void
4658 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4660 tree var;
4661 unsigned ix;
4662 gimple_seq bindings = NULL;
4664 if (!gimple_in_ssa_p (id->src_cfun))
4665 return;
4667 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4668 return;
4670 for (var = DECL_ARGUMENTS (id->src_fn);
4671 var; var = DECL_CHAIN (var))
4672 reset_debug_binding (id, var, &bindings);
4674 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4675 reset_debug_binding (id, var, &bindings);
4677 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4680 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4682 static bool
4683 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4684 bitmap to_purge)
4686 tree use_retvar;
4687 tree fn;
4688 hash_map<tree, tree> *dst;
4689 hash_map<tree, tree> *st = NULL;
4690 tree return_slot;
4691 tree modify_dest;
4692 struct cgraph_edge *cg_edge;
4693 cgraph_inline_failed_t reason;
4694 basic_block return_block;
4695 edge e;
4696 gimple_stmt_iterator gsi, stmt_gsi;
4697 bool successfully_inlined = false;
4698 bool purge_dead_abnormal_edges;
4699 gcall *call_stmt;
4700 unsigned int prop_mask, src_properties;
4701 struct function *dst_cfun;
4702 tree simduid;
4703 use_operand_p use;
4704 gimple *simtenter_stmt = NULL;
4705 vec<tree> *simtvars_save;
4706 clone_info *info;
4708 /* The gimplifier uses input_location in too many places, such as
4709 internal_get_tmp_var (). */
4710 location_t saved_location = input_location;
4711 input_location = gimple_location (stmt);
4713 /* From here on, we're only interested in CALL_EXPRs. */
4714 call_stmt = dyn_cast <gcall *> (stmt);
4715 if (!call_stmt)
4716 goto egress;
4718 cg_edge = id->dst_node->get_edge (stmt);
4719 gcc_checking_assert (cg_edge);
4720 /* First, see if we can figure out what function is being called.
4721 If we cannot, then there is no hope of inlining the function. */
4722 if (cg_edge->indirect_unknown_callee)
4723 goto egress;
4724 fn = cg_edge->callee->decl;
4725 gcc_checking_assert (fn);
4727 /* If FN is a declaration of a function in a nested scope that was
4728 globally declared inline, we don't set its DECL_INITIAL.
4729 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4730 C++ front-end uses it for cdtors to refer to their internal
4731 declarations that are not real functions. Fortunately those
4732 don't have trees to be saved, so we can tell by checking their
4733 gimple_body. */
4734 if (!DECL_INITIAL (fn)
4735 && DECL_ABSTRACT_ORIGIN (fn)
4736 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4737 fn = DECL_ABSTRACT_ORIGIN (fn);
4739 /* Don't try to inline functions that are not well-suited to inlining. */
4740 if (cg_edge->inline_failed)
4742 reason = cg_edge->inline_failed;
4743 /* If this call was originally indirect, we do not want to emit any
4744 inlining related warnings or sorry messages because there are no
4745 guarantees regarding those. */
4746 if (cg_edge->indirect_inlining_edge)
4747 goto egress;
4749 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4750 /* For extern inline functions that get redefined we have always
4751 silently ignored the always_inline flag. Better behavior would
4752 be to be able to keep both bodies and use the extern inline body
4753 for inlining, but we can't do that because frontends overwrite
4754 the body. */
4755 && !cg_edge->callee->redefined_extern_inline
4756 /* During early inline pass, report only when optimization is
4757 not turned on. */
4758 && (symtab->global_info_ready
4759 || !optimize
4760 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4761 /* PR 20090218-1_0.c. Body can be provided by another module. */
4762 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4764 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4765 cgraph_inline_failed_string (reason));
4766 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4767 inform (gimple_location (stmt), "called from here");
4768 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4769 inform (DECL_SOURCE_LOCATION (cfun->decl),
4770 "called from this function");
4772 else if (opt_for_fn (fn, warn_inline)
4773 && DECL_DECLARED_INLINE_P (fn)
4774 && !DECL_NO_INLINE_WARNING_P (fn)
4775 && !DECL_IN_SYSTEM_HEADER (fn)
4776 && reason != CIF_UNSPECIFIED
4777 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4778 /* Do not warn about not inlined recursive calls. */
4779 && !cg_edge->recursive_p ()
4780 /* Avoid warnings during early inline pass. */
4781 && symtab->global_info_ready)
4783 auto_diagnostic_group d;
4784 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4785 fn, _(cgraph_inline_failed_string (reason))))
4787 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4788 inform (gimple_location (stmt), "called from here");
4789 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4790 inform (DECL_SOURCE_LOCATION (cfun->decl),
4791 "called from this function");
4794 goto egress;
4796 id->src_node = cg_edge->callee;
4798 /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4799 and redirect to the function being thunked. */
4800 if (id->src_node->thunk)
4802 cgraph_edge *edge;
4803 tree virtual_offset = NULL;
4804 profile_count count = cg_edge->count;
4805 tree op;
4806 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4807 thunk_info *info = thunk_info::get (id->src_node);
4809 cgraph_edge::remove (cg_edge);
4810 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4811 gimple_uid (stmt),
4812 profile_count::one (),
4813 profile_count::one (),
4814 true);
4815 edge->count = count;
4816 if (info->virtual_offset_p)
4817 virtual_offset = size_int (info->virtual_value);
4818 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4819 NULL);
4820 gsi_insert_before (&iter, gimple_build_assign (op,
4821 gimple_call_arg (stmt, 0)),
4822 GSI_NEW_STMT);
4823 gcc_assert (info->this_adjusting);
4824 op = thunk_adjust (&iter, op, 1, info->fixed_offset,
4825 virtual_offset, info->indirect_offset);
4827 gimple_call_set_arg (stmt, 0, op);
4828 gimple_call_set_fndecl (stmt, edge->callee->decl);
4829 update_stmt (stmt);
4830 id->src_node->remove ();
4831 expand_call_inline (bb, stmt, id, to_purge);
4832 maybe_remove_unused_call_args (cfun, stmt);
4833 return true;
4835 fn = cg_edge->callee->decl;
4836 cg_edge->callee->get_untransformed_body ();
4838 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4839 cg_edge->callee->verify ();
4841 /* We will be inlining this callee. */
4842 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4844 /* Update the callers EH personality. */
4845 if (DECL_FUNCTION_PERSONALITY (fn))
4846 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4847 = DECL_FUNCTION_PERSONALITY (fn);
4849 /* Split the block before the GIMPLE_CALL. */
4850 stmt_gsi = gsi_for_stmt (stmt);
4851 gsi_prev (&stmt_gsi);
4852 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4853 bb = e->src;
4854 return_block = e->dest;
4855 remove_edge (e);
4857 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4858 been the source of abnormal edges. In this case, schedule
4859 the removal of dead abnormal edges. */
4860 gsi = gsi_start_bb (return_block);
4861 gsi_next (&gsi);
4862 purge_dead_abnormal_edges = gsi_end_p (gsi);
4864 stmt_gsi = gsi_start_bb (return_block);
4866 /* Build a block containing code to initialize the arguments, the
4867 actual inline expansion of the body, and a label for the return
4868 statements within the function to jump to. The type of the
4869 statement expression is the return type of the function call.
4870 ??? If the call does not have an associated block then we will
4871 remap all callee blocks to NULL, effectively dropping most of
4872 its debug information. This should only happen for calls to
4873 artificial decls inserted by the compiler itself. We need to
4874 either link the inlined blocks into the caller block tree or
4875 not refer to them in any way to not break GC for locations. */
4876 if (tree block = gimple_block (stmt))
4878 /* We do want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION,
4879 so that inlined_function_outer_scope_p returns true on this BLOCK. */
4880 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4881 if (loc == UNKNOWN_LOCATION)
4882 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4883 if (loc == UNKNOWN_LOCATION)
4884 loc = BUILTINS_LOCATION;
4885 id->block = make_node (BLOCK);
4886 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4887 BLOCK_SOURCE_LOCATION (id->block) = loc;
4888 prepend_lexical_block (block, id->block);
4891 /* Local declarations will be replaced by their equivalents in this map. */
4892 st = id->decl_map;
4893 id->decl_map = new hash_map<tree, tree>;
4894 dst = id->debug_map;
4895 id->debug_map = NULL;
4896 if (flag_stack_reuse != SR_NONE)
4897 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4899 /* Record the function we are about to inline. */
4900 id->src_fn = fn;
4901 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4902 id->reset_location = DECL_IGNORED_P (fn);
4903 id->call_stmt = call_stmt;
4905 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4906 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4907 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4908 simtvars_save = id->dst_simt_vars;
4909 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4910 && (simduid = bb->loop_father->simduid) != NULL_TREE
4911 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4912 && single_imm_use (simduid, &use, &simtenter_stmt)
4913 && is_gimple_call (simtenter_stmt)
4914 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4915 vec_alloc (id->dst_simt_vars, 0);
4916 else
4917 id->dst_simt_vars = NULL;
4919 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4920 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4922 /* If the src function contains an IFN_VA_ARG, then so will the dst
4923 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4924 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4925 src_properties = id->src_cfun->curr_properties & prop_mask;
4926 if (src_properties != prop_mask)
4927 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4928 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4929 id->dst_node->calls_declare_variant_alt
4930 |= id->src_node->calls_declare_variant_alt;
4932 gcc_assert (!id->src_cfun->after_inlining);
4934 id->entry_bb = bb;
4935 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4937 gimple_stmt_iterator si = gsi_last_bb (bb);
4938 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4939 NOT_TAKEN),
4940 GSI_NEW_STMT);
4942 initialize_inlined_parameters (id, stmt, fn, bb);
4943 if (debug_nonbind_markers_p && debug_inline_points && id->block
4944 && inlined_function_outer_scope_p (id->block))
4946 gimple_stmt_iterator si = gsi_last_bb (bb);
4947 gsi_insert_after (&si, gimple_build_debug_inline_entry
4948 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4949 GSI_NEW_STMT);
4952 if (DECL_INITIAL (fn))
4954 if (gimple_block (stmt))
4956 tree *var;
4958 prepend_lexical_block (id->block,
4959 remap_blocks (DECL_INITIAL (fn), id));
4960 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4961 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4962 == NULL_TREE));
4963 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4964 otherwise, in DWARF, the DW_TAG_formal_parameter entries will not be
4965 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4966 under it. The parameters can then be evaluated in the debugger,
4967 but don't show up in backtraces. */
4968 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4969 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4971 tree v = *var;
4972 *var = TREE_CHAIN (v);
4973 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4974 BLOCK_VARS (id->block) = v;
4976 else
4977 var = &TREE_CHAIN (*var);
4979 else
4980 remap_blocks_to_null (DECL_INITIAL (fn), id);
4983 /* Return statements in the function body will be replaced by jumps
4984 to the RET_LABEL. */
4985 gcc_assert (DECL_INITIAL (fn));
4986 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4988 /* Find the LHS to which the result of this call is assigned. */
4989 return_slot = NULL;
4990 if (gimple_call_lhs (stmt))
4992 modify_dest = gimple_call_lhs (stmt);
4994 /* The function which we are inlining might not return a value,
4995 in which case we should issue a warning that the function
4996 does not return a value. In that case the optimizers will
4997 see that the variable to which the value is assigned was not
4998 initialized. We do not want to issue a warning about that
4999 uninitialized variable. */
5000 if (DECL_P (modify_dest))
5001 TREE_NO_WARNING (modify_dest) = 1;
5003 if (gimple_call_return_slot_opt_p (call_stmt))
5005 return_slot = modify_dest;
5006 modify_dest = NULL;
5009 else
5010 modify_dest = NULL;
5012 /* If we are inlining a call to the C++ operator new, we don't want
5013 to use type based alias analysis on the return value. Otherwise
5014 we may get confused if the compiler sees that the inlined new
5015 function returns a pointer which was just deleted. See bug
5016 33407. */
5017 if (DECL_IS_OPERATOR_NEW_P (fn))
5019 return_slot = NULL;
5020 modify_dest = NULL;
5023 /* Declare the return variable for the function. */
5024 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
5026 /* Add local vars in this inlined callee to caller. */
5027 add_local_variables (id->src_cfun, cfun, id);
5029 info = clone_info::get (id->src_node);
5030 if (info && info->performed_splits)
5032 clone_info *dst_info = clone_info::get_create (id->dst_node);
5033 /* Any calls from the inlined function will be turned into calls from the
5034 function we inline into. We must preserve the notes about parameter
5035 splits so that such calls can be redirected/updated correctly. */
5036 unsigned len = vec_safe_length (info->performed_splits);
5037 for (unsigned i = 0; i < len; i++)
5039 ipa_param_performed_split ps
5040 = (*info->performed_splits)[i];
5041 ps.dummy_decl = remap_decl (ps.dummy_decl, id);
5042 vec_safe_push (dst_info->performed_splits, ps);
5045 if (flag_checking)
5047 len = vec_safe_length (dst_info->performed_splits);
5048 for (unsigned i = 0; i < len; i++)
5050 ipa_param_performed_split *ps1
5051 = &(*dst_info->performed_splits)[i];
5052 for (unsigned j = i + 1; j < len; j++)
5054 ipa_param_performed_split *ps2
5055 = &(*dst_info->performed_splits)[j];
5056 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
5057 || ps1->unit_offset != ps2->unit_offset);
5063 if (dump_enabled_p ())
5065 char buf[128];
5066 snprintf (buf, sizeof(buf), "%4.2f",
5067 cg_edge->sreal_frequency ().to_double ());
5068 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5069 call_stmt,
5070 "Inlining %C to %C with frequency %s\n",
5071 id->src_node, id->dst_node, buf);
5072 if (dump_file && (dump_flags & TDF_DETAILS))
5074 id->src_node->dump (dump_file);
5075 id->dst_node->dump (dump_file);
5079 /* This is it. Duplicate the callee body. Assume callee is
5080 pre-gimplified. Note that we must not alter the caller
5081 function in any way before this point, as this CALL_EXPR may be
5082 a self-referential call; if we're calling ourselves, we need to
5083 duplicate our body before altering anything. */
5084 copy_body (id, bb, return_block, NULL);
5086 reset_debug_bindings (id, stmt_gsi);
5088 if (flag_stack_reuse != SR_NONE)
5089 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5090 if (!TREE_THIS_VOLATILE (p))
5092 tree *varp = id->decl_map->get (p);
5093 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
5095 tree clobber = build_clobber (TREE_TYPE (*varp));
5096 gimple *clobber_stmt;
5097 clobber_stmt = gimple_build_assign (*varp, clobber);
5098 gimple_set_location (clobber_stmt, gimple_location (stmt));
5099 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5103 /* Reset the escaped solution. */
5104 if (cfun->gimple_df)
5105 pt_solution_reset (&cfun->gimple_df->escaped);
5107 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
5108 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5110 size_t nargs = gimple_call_num_args (simtenter_stmt);
5111 vec<tree> *vars = id->dst_simt_vars;
5112 auto_vec<tree> newargs (nargs + vars->length ());
5113 for (size_t i = 0; i < nargs; i++)
5114 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5115 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5117 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5118 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5120 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5121 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5122 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5123 gsi_replace (&gsi, g, false);
5125 vec_free (id->dst_simt_vars);
5126 id->dst_simt_vars = simtvars_save;
5128 /* Clean up. */
5129 if (id->debug_map)
5131 delete id->debug_map;
5132 id->debug_map = dst;
5134 delete id->decl_map;
5135 id->decl_map = st;
5137 /* Unlink the call's virtual operands before replacing it. */
5138 unlink_stmt_vdef (stmt);
5139 if (gimple_vdef (stmt)
5140 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5141 release_ssa_name (gimple_vdef (stmt));
5143 /* If the inlined function returns a result that we care about,
5144 substitute the GIMPLE_CALL with an assignment of the return
5145 variable to the LHS of the call. That is, if STMT was
5146 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
5147 if (use_retvar && gimple_call_lhs (stmt))
5149 gimple *old_stmt = stmt;
5150 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5151 gimple_set_location (stmt, gimple_location (old_stmt));
5152 gsi_replace (&stmt_gsi, stmt, false);
5153 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5154 /* Append a clobber for id->retvar if easily possible. */
5155 if (flag_stack_reuse != SR_NONE
5156 && id->retvar
5157 && VAR_P (id->retvar)
5158 && id->retvar != return_slot
5159 && id->retvar != modify_dest
5160 && !TREE_THIS_VOLATILE (id->retvar)
5161 && !is_gimple_reg (id->retvar)
5162 && !stmt_ends_bb_p (stmt))
5164 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5165 gimple *clobber_stmt;
5166 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5167 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5168 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5171 else
5173 /* Handle the case of inlining a function with no return
5174 statement, which causes the return value to become undefined. */
5175 if (gimple_call_lhs (stmt)
5176 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5178 tree name = gimple_call_lhs (stmt);
5179 tree var = SSA_NAME_VAR (name);
5180 tree def = var ? ssa_default_def (cfun, var) : NULL;
5182 if (def)
5184 /* If the variable is used undefined, make this name
5185 undefined via a move. */
5186 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5187 gsi_replace (&stmt_gsi, stmt, true);
5189 else
5191 if (!var)
5193 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5194 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5196 /* Otherwise make this variable undefined. */
5197 gsi_remove (&stmt_gsi, true);
5198 set_ssa_default_def (cfun, var, name);
5199 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5202 /* Replace with a clobber for id->retvar. */
5203 else if (flag_stack_reuse != SR_NONE
5204 && id->retvar
5205 && VAR_P (id->retvar)
5206 && id->retvar != return_slot
5207 && id->retvar != modify_dest
5208 && !TREE_THIS_VOLATILE (id->retvar)
5209 && !is_gimple_reg (id->retvar))
5211 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5212 gimple *clobber_stmt;
5213 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5214 gimple_set_location (clobber_stmt, gimple_location (stmt));
5215 gsi_replace (&stmt_gsi, clobber_stmt, false);
5216 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5218 else
5219 gsi_remove (&stmt_gsi, true);
5222 if (purge_dead_abnormal_edges)
5223 bitmap_set_bit (to_purge, return_block->index);
5225 /* If the value of the new expression is ignored, that's OK. We
5226 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5227 the equivalent inlined version either. */
5228 if (is_gimple_assign (stmt))
5230 gcc_assert (gimple_assign_single_p (stmt)
5231 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5232 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5235 id->add_clobbers_to_eh_landing_pads = 0;
5237 /* Output the inlining info for this abstract function, since it has been
5238 inlined. If we don't do this now, we can lose the information about the
5239 variables in the function when the blocks get blown away as soon as we
5240 remove the cgraph node. */
5241 if (gimple_block (stmt))
5242 (*debug_hooks->outlining_inline_function) (fn);
5244 /* Update callgraph if needed. */
5245 cg_edge->callee->remove ();
5247 id->block = NULL_TREE;
5248 id->retvar = NULL_TREE;
5249 successfully_inlined = true;
5251 egress:
5252 input_location = saved_location;
5253 return successfully_inlined;
5256 /* Expand call statements reachable from STMT_P.
5257 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5258 in a MODIFY_EXPR. */
5260 static bool
5261 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5262 bitmap to_purge)
5264 gimple_stmt_iterator gsi;
5265 bool inlined = false;
5267 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5269 gimple *stmt = gsi_stmt (gsi);
5270 gsi_prev (&gsi);
5272 if (is_gimple_call (stmt)
5273 && !gimple_call_internal_p (stmt))
5274 inlined |= expand_call_inline (bb, stmt, id, to_purge);
5277 return inlined;
5281 /* Walk all basic blocks created after FIRST and try to fold every statement
5282 in the STATEMENTS pointer set. */
5284 static void
5285 fold_marked_statements (int first, hash_set<gimple *> *statements)
5287 auto_bitmap to_purge;
5289 auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
5290 auto_sbitmap visited (last_basic_block_for_fn (cfun));
5291 bitmap_clear (visited);
5293 stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5294 while (!stack.is_empty ())
5296 /* Look at the edge on the top of the stack. */
5297 edge e = stack.pop ();
5298 basic_block dest = e->dest;
5300 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
5301 || bitmap_bit_p (visited, dest->index))
5302 continue;
5304 bitmap_set_bit (visited, dest->index);
5306 if (dest->index >= first)
5307 for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
5308 !gsi_end_p (gsi); gsi_next (&gsi))
5310 if (!statements->contains (gsi_stmt (gsi)))
5311 continue;
5313 gimple *old_stmt = gsi_stmt (gsi);
5314 tree old_decl = (is_gimple_call (old_stmt)
5315 ? gimple_call_fndecl (old_stmt) : 0);
5316 if (old_decl && fndecl_built_in_p (old_decl))
5318 /* Folding builtins can create multiple instructions;
5319 we need to look at all of them. */
5320 gimple_stmt_iterator i2 = gsi;
5321 gsi_prev (&i2);
5322 if (fold_stmt (&gsi))
5324 gimple *new_stmt;
5325 /* If a builtin at the end of a bb folded into nothing,
5326 the following loop won't work. */
5327 if (gsi_end_p (gsi))
5329 cgraph_update_edges_for_call_stmt (old_stmt,
5330 old_decl, NULL);
5331 break;
5333 if (gsi_end_p (i2))
5334 i2 = gsi_start_bb (dest);
5335 else
5336 gsi_next (&i2);
5337 while (1)
5339 new_stmt = gsi_stmt (i2);
5340 update_stmt (new_stmt);
5341 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5342 new_stmt);
5344 if (new_stmt == gsi_stmt (gsi))
5346 /* It is okay to check only for the very last
5347 of these statements. If it is a throwing
5348 statement nothing will change. If it isn't,
5349 this can remove EH edges. The only way this
5350 could be wrong is if some intermediate stmts
5351 threw but the last one didn't; that would mean
5352 we'd have to split the block, which we can't
5353 do here and we'd lose anyway. And as builtins
5354 probably never throw, this is all
5355 moot anyway. */
5356 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5357 new_stmt))
5358 bitmap_set_bit (to_purge, dest->index);
5359 break;
5361 gsi_next (&i2);
5365 else if (fold_stmt (&gsi))
5367 /* Re-read the statement from GSI as fold_stmt() may
5368 have changed it. */
5369 gimple *new_stmt = gsi_stmt (gsi);
5370 update_stmt (new_stmt);
5372 if (is_gimple_call (old_stmt)
5373 || is_gimple_call (new_stmt))
5374 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5375 new_stmt);
5377 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5378 bitmap_set_bit (to_purge, dest->index);
5382 if (EDGE_COUNT (dest->succs) > 0)
5384 /* Avoid warnings emitted from folding statements that
5385 became unreachable because of inlined function parameter
5386 propagation. */
5387 e = find_taken_edge (dest, NULL_TREE);
5388 if (e)
5389 stack.quick_push (e);
5390 else
5392 edge_iterator ei;
5393 FOR_EACH_EDGE (e, ei, dest->succs)
5394 stack.safe_push (e);
5399 gimple_purge_all_dead_eh_edges (to_purge);
5402 /* Expand calls to inline functions in the body of FN. */
5404 unsigned int
5405 optimize_inline_calls (tree fn)
5407 copy_body_data id;
5408 basic_block bb;
5409 int last = n_basic_blocks_for_fn (cfun);
5410 bool inlined_p = false;
5412 /* Clear out ID. */
5413 memset (&id, 0, sizeof (id));
5415 id.src_node = id.dst_node = cgraph_node::get (fn);
5416 gcc_assert (id.dst_node->definition);
5417 id.dst_fn = fn;
5418 /* Or any functions that aren't finished yet. */
5419 if (current_function_decl)
5420 id.dst_fn = current_function_decl;
5422 id.copy_decl = copy_decl_maybe_to_var;
5423 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5424 id.transform_new_cfg = false;
5425 id.transform_return_to_modify = true;
5426 id.transform_parameter = true;
5427 id.transform_lang_insert_block = NULL;
5428 id.statements_to_fold = new hash_set<gimple *>;
5430 push_gimplify_context ();
5432 /* We make no attempts to keep dominance info up-to-date. */
5433 free_dominance_info (CDI_DOMINATORS);
5434 free_dominance_info (CDI_POST_DOMINATORS);
5436 /* Register specific gimple functions. */
5437 gimple_register_cfg_hooks ();
5439 /* Reach the trees by walking over the CFG, and note the
5440 enclosing basic-blocks in the call edges. */
5441 /* We walk the blocks going forward, because inlined function bodies
5442 will split id->current_basic_block, and the new blocks will
5443 follow it; we'll trudge through them, processing their CALL_EXPRs
5444 along the way. */
5445 auto_bitmap to_purge;
5446 FOR_EACH_BB_FN (bb, cfun)
5447 inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5449 pop_gimplify_context (NULL);
5451 if (flag_checking)
5453 struct cgraph_edge *e;
5455 id.dst_node->verify ();
5457 /* Double check that we inlined everything we are supposed to inline. */
5458 for (e = id.dst_node->callees; e; e = e->next_callee)
5459 gcc_assert (e->inline_failed);
5462 /* If we didn't inline into the function there is nothing to do. */
5463 if (!inlined_p)
5465 delete id.statements_to_fold;
5466 return 0;
5469 /* Fold queued statements. */
5470 update_max_bb_count ();
5471 fold_marked_statements (last, id.statements_to_fold);
5472 delete id.statements_to_fold;
5474 /* Finally purge EH and abnormal edges from the call stmts we inlined.
5475 We need to do this after fold_marked_statements since that may walk
5476 the SSA use-def chain. */
5477 unsigned i;
5478 bitmap_iterator bi;
5479 EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5481 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5482 if (bb)
5484 gimple_purge_dead_eh_edges (bb);
5485 gimple_purge_dead_abnormal_call_edges (bb);
5489 gcc_assert (!id.debug_stmts.exists ());
5491 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5492 number_blocks (fn);
5494 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5495 id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();
5497 if (flag_checking)
5498 id.dst_node->verify ();
5500 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5501 not possible yet - the IPA passes might make various functions not
5502 throw and they don't care to proactively update local EH info. This is
5503 done later in the fixup_cfg pass, which also executes the verification. */
5504 return (TODO_update_ssa
5505 | TODO_cleanup_cfg
5506 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5507 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5508 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5509 ? TODO_rebuild_frequencies : 0));
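/* Illustrative sketch (not compiled): how a GIMPLE pass's execute hook might
   invoke optimize_inline_calls above and hand the returned TODO_* flags to
   the pass manager.  The function name below is hypothetical.  */
#if 0
static unsigned int
example_execute_inline_pass (function *fun)
{
  /* optimize_inline_calls expands the inline calls in FUN and returns the
     TODO flags the pass manager should act on (SSA update, CFG cleanup,
     frequency rebuild).  */
  return optimize_inline_calls (fun->decl);
}
#endif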
5512 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5514 tree
5515 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5517 enum tree_code code = TREE_CODE (*tp);
5518 enum tree_code_class cl = TREE_CODE_CLASS (code);
5520 /* We make copies of most nodes. */
5521 if (IS_EXPR_CODE_CLASS (cl)
5522 || code == TREE_LIST
5523 || code == TREE_VEC
5524 || code == TYPE_DECL
5525 || code == OMP_CLAUSE)
5527 /* Because the chain gets clobbered when we make a copy, we save it
5528 here. */
5529 tree chain = NULL_TREE, new_tree;
5531 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5532 chain = TREE_CHAIN (*tp);
5534 /* Copy the node. */
5535 new_tree = copy_node (*tp);
5537 *tp = new_tree;
5539 /* Now, restore the chain, if appropriate. That will cause
5540 walk_tree to walk into the chain as well. */
5541 if (code == PARM_DECL
5542 || code == TREE_LIST
5543 || code == OMP_CLAUSE)
5544 TREE_CHAIN (*tp) = chain;
5546 /* For now, we don't update BLOCKs when we make copies. So, we
5547 have to nullify all BIND_EXPRs. */
5548 if (TREE_CODE (*tp) == BIND_EXPR)
5549 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5551 else if (code == CONSTRUCTOR)
5553 /* CONSTRUCTOR nodes need special handling because
5554 we need to duplicate the vector of elements. */
5555 tree new_tree;
5557 new_tree = copy_node (*tp);
5558 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5559 *tp = new_tree;
5561 else if (code == STATEMENT_LIST)
5562 /* We used to just abort on STATEMENT_LIST, but we can run into them
5563 with statement-expressions (c++/40975). */
5564 copy_statement_list (tp);
5565 else if (TREE_CODE_CLASS (code) == tcc_type)
5566 *walk_subtrees = 0;
5567 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5568 *walk_subtrees = 0;
5569 else if (TREE_CODE_CLASS (code) == tcc_constant)
5570 *walk_subtrees = 0;
5571 return NULL_TREE;
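/* Illustrative sketch (not compiled): copy_tree_r is meant to be driven by
   walk_tree; walking an expression with it yields an unshared copy of the
   expression nodes, while decls, types and constants stay shared because the
   callback does not walk into them.  The helper name is hypothetical.  */
#if 0
static tree
example_copy_generic_expr (tree expr)
{
  /* No callback data is needed; copy_tree_r ignores its DATA argument.  */
  walk_tree (&expr, copy_tree_r, NULL, NULL);
  return expr;
}
#endif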
5574 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5575 information indicating to what new SAVE_EXPR this one should be mapped,
5576 use that one. Otherwise, create a new node and enter it in ST, so that
5577 further copies of the same SAVE_EXPR map to the same replacement. */
5579 static void
5580 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5582 tree *n;
5583 tree t;
5585 /* See if we already encountered this SAVE_EXPR. */
5586 n = st->get (*tp);
5588 /* If we didn't already remap this SAVE_EXPR, do so now. */
5589 if (!n)
5591 t = copy_node (*tp);
5593 /* Remember this SAVE_EXPR. */
5594 st->put (*tp, t);
5595 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5596 st->put (t, t);
5598 else
5600 /* We've already walked into this SAVE_EXPR; don't do it again. */
5601 *walk_subtrees = 0;
5602 t = *n;
5605 /* Replace this SAVE_EXPR with the copy. */
5606 *tp = t;
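/* Illustrative sketch (not compiled): remap_save_expr is intended to be
   called from a walk_tree callback; the map guarantees that every occurrence
   of one SAVE_EXPR in the copied body shares a single copy.  The callback
   below is hypothetical.  */
#if 0
static tree
example_remap_save_expr_cb (tree *tp, int *walk_subtrees, void *data)
{
  hash_map<tree, tree> *st = (hash_map<tree, tree> *) data;
  if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, st, walk_subtrees);
  return NULL_TREE;
}
#endif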
5609 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5610 label, copies the declaration and enters it in the decl map carried in
5611 WI->info (which is really a 'copy_body_data *'). */
5613 static tree
5614 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5615 bool *handled_ops_p ATTRIBUTE_UNUSED,
5616 struct walk_stmt_info *wi)
5618 copy_body_data *id = (copy_body_data *) wi->info;
5619 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5621 if (stmt)
5623 tree decl = gimple_label_label (stmt);
5625 /* Copy the decl and remember the copy. */
5626 insert_decl_map (id, decl, id->copy_decl (decl, id));
5629 return NULL_TREE;
5632 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5633 struct walk_stmt_info *wi);
5635 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5636 Using the decl map of the copy_body_data passed in WI->info (really a
5637 hash_map<tree, tree>), remaps all local declarations to appropriate
5638 replacements in gimple operands. */
5640 static tree
5641 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5643 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5644 copy_body_data *id = (copy_body_data *) wi->info;
5645 hash_map<tree, tree> *st = id->decl_map;
5646 tree *n;
5647 tree expr = *tp;
5649 /* For recursive invocations this is no longer the LHS itself. */
5650 bool is_lhs = wi->is_lhs;
5651 wi->is_lhs = false;
5653 if (TREE_CODE (expr) == SSA_NAME)
5655 *tp = remap_ssa_name (*tp, id);
5656 *walk_subtrees = 0;
5657 if (is_lhs)
5658 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5660 /* Only a local declaration (variable or label). */
5661 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5662 || TREE_CODE (expr) == LABEL_DECL)
5664 /* Lookup the declaration. */
5665 n = st->get (expr);
5667 /* If it's there, remap it. */
5668 if (n)
5669 *tp = *n;
5670 *walk_subtrees = 0;
5672 else if (TREE_CODE (expr) == STATEMENT_LIST
5673 || TREE_CODE (expr) == BIND_EXPR
5674 || TREE_CODE (expr) == SAVE_EXPR)
5675 gcc_unreachable ();
5676 else if (TREE_CODE (expr) == TARGET_EXPR)
5678 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5679 It's OK for this to happen if it was part of a subtree that
5680 isn't immediately expanded, such as operand 2 of another
5681 TARGET_EXPR. */
5682 if (!TREE_OPERAND (expr, 1))
5684 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5685 TREE_OPERAND (expr, 3) = NULL_TREE;
5688 else if (TREE_CODE (expr) == OMP_CLAUSE)
5690 /* Before the omplower pass completes, some OMP clauses can contain
5691 sequences that are neither copied by gimple_seq_copy nor walked by
5692 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5693 in those situations, we have to copy and process them explicitly. */
5695 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5697 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5698 seq = duplicate_remap_omp_clause_seq (seq, wi);
5699 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5701 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5703 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5704 seq = duplicate_remap_omp_clause_seq (seq, wi);
5705 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5707 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5709 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5710 seq = duplicate_remap_omp_clause_seq (seq, wi);
5711 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5712 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5713 seq = duplicate_remap_omp_clause_seq (seq, wi);
5714 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5718 /* Keep iterating. */
5719 return NULL_TREE;
5723 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5724 Using the decl map of the copy_body_data passed in WI->info (really a
5725 hash_map<tree, tree>), remaps all local declarations to appropriate
5726 replacements in gimple statements. */
5728 static tree
5729 replace_locals_stmt (gimple_stmt_iterator *gsip,
5730 bool *handled_ops_p ATTRIBUTE_UNUSED,
5731 struct walk_stmt_info *wi)
5733 copy_body_data *id = (copy_body_data *) wi->info;
5734 gimple *gs = gsi_stmt (*gsip);
5736 if (gbind *stmt = dyn_cast <gbind *> (gs))
5738 tree block = gimple_bind_block (stmt);
5740 if (block)
5742 remap_block (&block, id);
5743 gimple_bind_set_block (stmt, block);
5746 /* This will remap a lot of the same decls again, but this should be
5747 harmless. */
5748 if (gimple_bind_vars (stmt))
5750 tree old_var, decls = gimple_bind_vars (stmt);
5752 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5753 if (!can_be_nonlocal (old_var, id)
5754 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5755 remap_decl (old_var, id);
5757 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5758 id->prevent_decl_creation_for_types = true;
5759 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5760 id->prevent_decl_creation_for_types = false;
5764 /* Keep iterating. */
5765 return NULL_TREE;
5768 /* Create a copy of SEQ and remap all decls in it. */
5770 static gimple_seq
5771 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5773 if (!seq)
5774 return NULL;
5776 /* Any labels in OMP sequences can only be referred to within the sequence
5777 itself, and therefore we can do both walks here. */
5778 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5779 gimple_seq copy = gimple_seq_copy (seq);
5780 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5781 return copy;
5784 /* Copies everything in SEQ and replaces variables and labels local to
5785 current_function_decl. */
5787 gimple_seq
5788 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5790 copy_body_data id;
5791 struct walk_stmt_info wi;
5792 gimple_seq copy;
5794 /* There's nothing to do for NULL_TREE. */
5795 if (seq == NULL)
5796 return seq;
5798 /* Set up ID. */
5799 memset (&id, 0, sizeof (id));
5800 id.src_fn = current_function_decl;
5801 id.dst_fn = current_function_decl;
5802 id.src_cfun = cfun;
5803 id.decl_map = new hash_map<tree, tree>;
5804 id.debug_map = NULL;
5806 id.copy_decl = copy_decl_no_change;
5807 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5808 id.transform_new_cfg = false;
5809 id.transform_return_to_modify = false;
5810 id.transform_parameter = false;
5811 id.transform_lang_insert_block = NULL;
5813 /* Walk the tree once to find local labels. */
5814 memset (&wi, 0, sizeof (wi));
5815 hash_set<tree> visited;
5816 wi.info = &id;
5817 wi.pset = &visited;
5818 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5820 copy = gimple_seq_copy (seq);
5822 /* Walk the copy, remapping decls. */
5823 memset (&wi, 0, sizeof (wi));
5824 wi.info = &id;
5825 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5827 /* Clean up. */
5828 delete id.decl_map;
5829 if (id.debug_map)
5830 delete id.debug_map;
5831 if (id.dependence_map)
5833 delete id.dependence_map;
5834 id.dependence_map = NULL;
5837 return copy;
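/* Illustrative sketch (not compiled): a typical use of the routine above is
   to duplicate the body of a GIMPLE_BIND so that the second copy gets its
   own local variables and labels.  The helper name is hypothetical.  */
#if 0
static gimple_seq
example_duplicate_bind_body (gbind *bind)
{
  /* Local VAR_DECLs and LABEL_DECLs in the copy are remapped to fresh
     declarations, so both sequences can be expanded independently.  */
  return copy_gimple_seq_and_replace_locals (gimple_bind_body (bind));
}
#endif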
5841 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5843 static tree
5844 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5846 if (*tp == data)
5847 return (tree) data;
5848 else
5849 return NULL;
5852 DEBUG_FUNCTION bool
5853 debug_find_tree (tree top, tree search)
5855 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5859 /* Declare the variables created by the inliner. Add all the variables in
5860 VARS to BLOCK. */
5862 static void
5863 declare_inline_vars (tree block, tree vars)
5865 tree t;
5866 for (t = vars; t; t = DECL_CHAIN (t))
5868 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5869 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5870 add_local_decl (cfun, t);
5873 if (block)
5874 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5877 /* Finish up the copy COPY of the decl DECL. The original DECL was in
5878 ID->src_fn; the copy will live in ID->dst_fn. Shared by the various
5879 copy_decl_* helpers below. */
5881 tree
5882 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5884 /* Don't generate debug information for the copy if we wouldn't have
5885 generated it for the original either. */
5886 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5887 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5889 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5890 declaration inspired this copy. */
5891 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5893 /* The new variable/label has no RTL, yet. */
5894 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5895 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5896 SET_DECL_RTL (copy, 0);
5897 /* For vector typed decls make sure to update DECL_MODE according
5898 to the new function context. */
5899 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5900 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5902 /* These args would always appear unused, if not for this. */
5903 TREE_USED (copy) = 1;
5905 /* Set the context for the new declaration. */
5906 if (!DECL_CONTEXT (decl))
5907 /* Globals stay global. */
5909 else if (DECL_CONTEXT (decl) != id->src_fn)
5910 /* Things that weren't in the scope of the function we're inlining
5911 from aren't in the scope we're inlining to, either. */
5913 else if (TREE_STATIC (decl))
5914 /* Function-scoped static variables should stay in the original
5915 function. */
5917 else
5919 /* Ordinary automatic local variables are now in the scope of the
5920 new function. */
5921 DECL_CONTEXT (copy) = id->dst_fn;
5922 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5924 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5925 DECL_ATTRIBUTES (copy)
5926 = tree_cons (get_identifier ("omp simt private"), NULL,
5927 DECL_ATTRIBUTES (copy));
5928 id->dst_simt_vars->safe_push (copy);
5932 return copy;
5935 /* Create a new VAR_DECL that is identical in all respects to DECL except that
5936 DECL can be either a PARM_DECL or a RESULT_DECL. The original
5937 DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
5939 tree
5940 copy_decl_to_var (tree decl, copy_body_data *id)
5942 tree copy, type;
5944 gcc_assert (TREE_CODE (decl) == PARM_DECL
5945 || TREE_CODE (decl) == RESULT_DECL);
5947 type = TREE_TYPE (decl);
5949 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5950 VAR_DECL, DECL_NAME (decl), type);
5951 if (DECL_PT_UID_SET_P (decl))
5952 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5953 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5954 TREE_READONLY (copy) = TREE_READONLY (decl);
5955 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5956 DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
5957 DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
5959 return copy_decl_for_dup_finish (id, decl, copy);
5962 /* Like copy_decl_to_var, but create a return slot object instead of a
5963 pointer variable for return by invisible reference. */
5965 static tree
5966 copy_result_decl_to_var (tree decl, copy_body_data *id)
5968 tree copy, type;
5970 gcc_assert (TREE_CODE (decl) == PARM_DECL
5971 || TREE_CODE (decl) == RESULT_DECL);
5973 type = TREE_TYPE (decl);
5974 if (DECL_BY_REFERENCE (decl))
5975 type = TREE_TYPE (type);
5977 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5978 VAR_DECL, DECL_NAME (decl), type);
5979 if (DECL_PT_UID_SET_P (decl))
5980 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5981 TREE_READONLY (copy) = TREE_READONLY (decl);
5982 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5983 if (!DECL_BY_REFERENCE (decl))
5985 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5986 DECL_NOT_GIMPLE_REG_P (copy)
5987 = (DECL_NOT_GIMPLE_REG_P (decl)
5988 /* RESULT_DECLs are treated specially by needs_to_live_in_memory;
5989 mirror that to the created VAR_DECL. */
5990 || (TREE_CODE (decl) == RESULT_DECL
5991 && aggregate_value_p (decl, id->src_fn)));
5994 return copy_decl_for_dup_finish (id, decl, copy);
5997 tree
5998 copy_decl_no_change (tree decl, copy_body_data *id)
6000 tree copy;
6002 copy = copy_node (decl);
6004 /* The COPY is not abstract; it will be generated in DST_FN. */
6005 DECL_ABSTRACT_P (copy) = false;
6006 lang_hooks.dup_lang_specific_decl (copy);
6008 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
6009 been taken; it's for internal bookkeeping in expand_goto_internal. */
6010 if (TREE_CODE (copy) == LABEL_DECL)
6012 TREE_ADDRESSABLE (copy) = 0;
6013 LABEL_DECL_UID (copy) = -1;
6016 return copy_decl_for_dup_finish (id, decl, copy);
6019 static tree
6020 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
6022 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
6023 return copy_decl_to_var (decl, id);
6024 else
6025 return copy_decl_no_change (decl, id);
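/* Illustrative sketch (not compiled): the copy_decl_* routines above are not
   called directly but plugged into copy_body_data::copy_decl; the inliner
   picks copy_decl_maybe_to_var while versioning keeps decls unchanged.  The
   helper name is hypothetical.  */
#if 0
static void
example_select_copy_decl_hook (copy_body_data *id, bool inlining)
{
  /* Inlining turns PARM_DECLs and RESULT_DECLs into VAR_DECLs of the
     caller; other clients preserve the declaration kind.  */
  id->copy_decl = inlining ? copy_decl_maybe_to_var : copy_decl_no_change;
}
#endif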
6028 /* Return a copy of the function's argument tree without any modifications. */
6030 static tree
6031 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
6033 tree arg, *parg;
6034 tree new_parm = NULL;
6036 parg = &new_parm;
6037 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
6039 tree new_tree = remap_decl (arg, id);
6040 if (TREE_CODE (new_tree) != PARM_DECL)
6041 new_tree = id->copy_decl (arg, id);
6042 lang_hooks.dup_lang_specific_decl (new_tree);
6043 *parg = new_tree;
6044 parg = &DECL_CHAIN (new_tree);
6046 return new_parm;
6049 /* Return a copy of the function's static chain. */
6050 static tree
6051 copy_static_chain (tree static_chain, copy_body_data * id)
6053 tree *chain_copy, *pvar;
6055 chain_copy = &static_chain;
6056 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
6058 tree new_tree = remap_decl (*pvar, id);
6059 lang_hooks.dup_lang_specific_decl (new_tree);
6060 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
6061 *pvar = new_tree;
6063 return static_chain;
6066 /* Return true if the function is allowed to be versioned.
6067 This is a guard for the versioning functionality. */
6069 bool
6070 tree_versionable_function_p (tree fndecl)
6072 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
6073 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
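/* Illustrative sketch (not compiled): callers are expected to check the
   predicate above before creating a version.  Real clients go through the
   cgraph cloning machinery; the helper below is hypothetical and assumes
   NEW_DECL has already been set up.  */
#if 0
static bool
example_try_version (tree old_decl, tree new_decl)
{
  if (!tree_versionable_function_p (old_decl))
    return false;
  /* No replacements, no prototype changes, copy all blocks, keep the
     original entry block.  */
  tree_function_versioning (old_decl, new_decl, NULL, NULL,
			    false, NULL, NULL);
  return true;
}
#endif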
6076 /* Update clone info after duplication. */
6078 static void
6079 update_clone_info (copy_body_data * id)
6081 clone_info *dst_info = clone_info::get (id->dst_node);
6082 vec<ipa_param_performed_split, va_gc> *cur_performed_splits
6083 = dst_info ? dst_info->performed_splits : NULL;
6084 if (cur_performed_splits)
6086 unsigned len = cur_performed_splits->length ();
6087 for (unsigned i = 0; i < len; i++)
6089 ipa_param_performed_split *ps = &(*cur_performed_splits)[i];
6090 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6094 struct cgraph_node *node;
6095 if (!id->dst_node->clones)
6096 return;
6097 for (node = id->dst_node->clones; node != id->dst_node;)
6099 /* First update replace maps to match the new body. */
6100 clone_info *info = clone_info::get (node);
6101 if (info && info->tree_map)
6103 unsigned int i;
6104 for (i = 0; i < vec_safe_length (info->tree_map); i++)
6106 struct ipa_replace_map *replace_info;
6107 replace_info = (*info->tree_map)[i];
6108 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6111 if (info && info->performed_splits)
6113 unsigned len = vec_safe_length (info->performed_splits);
6114 for (unsigned i = 0; i < len; i++)
6116 ipa_param_performed_split *ps
6117 = &(*info->performed_splits)[i];
6118 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6121 if (unsigned len = vec_safe_length (cur_performed_splits))
6123 /* We do not want to add the current performed splits when we are saving
6124 a copy of the function body for later use during inlining, as that would
6125 just duplicate all entries. So let's have a look whether anything
6126 referring to the first dummy_decl is already present. */
6127 if (!info)
6128 info = clone_info::get_create (node);
6129 unsigned dst_len = vec_safe_length (info->performed_splits);
6130 ipa_param_performed_split *first = &(*cur_performed_splits)[0];
6131 for (unsigned i = 0; i < dst_len; i++)
6132 if ((*info->performed_splits)[i].dummy_decl
6133 == first->dummy_decl)
6135 len = 0;
6136 break;
6139 for (unsigned i = 0; i < len; i++)
6140 vec_safe_push (info->performed_splits,
6141 (*cur_performed_splits)[i]);
6142 if (flag_checking)
6144 for (unsigned i = 0; i < dst_len; i++)
6146 ipa_param_performed_split *ps1
6147 = &(*info->performed_splits)[i];
6148 for (unsigned j = i + 1; j < dst_len; j++)
6150 ipa_param_performed_split *ps2
6151 = &(*info->performed_splits)[j];
6152 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
6153 || ps1->unit_offset != ps2->unit_offset);
6159 if (node->clones)
6160 node = node->clones;
6161 else if (node->next_sibling_clone)
6162 node = node->next_sibling_clone;
6163 else
6165 while (node != id->dst_node && !node->next_sibling_clone)
6166 node = node->clone_of;
6167 if (node != id->dst_node)
6168 node = node->next_sibling_clone;
6173 /* Create a copy of a function's tree.
6174 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6175 of the original function and the new copied function
6176 respectively. In case we want to replace a DECL
6177 tree with another tree while duplicating the function's
6178 body, TREE_MAP represents the mapping between these
6179 trees. If UPDATE_CLONES is set, the call_stmt fields
6180 of edges of clones of the function will be updated.
6182 If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype (i.e.
6183 the function parameters and return value) should be modified.
6184 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
6185 If non-NULL, NEW_ENTRY determines the new entry BB of the clone. */
6187 void
6188 tree_function_versioning (tree old_decl, tree new_decl,
6189 vec<ipa_replace_map *, va_gc> *tree_map,
6190 ipa_param_adjustments *param_adjustments,
6191 bool update_clones, bitmap blocks_to_copy,
6192 basic_block new_entry)
6194 struct cgraph_node *old_version_node;
6195 struct cgraph_node *new_version_node;
6196 copy_body_data id;
6197 tree p;
6198 unsigned i;
6199 struct ipa_replace_map *replace_info;
6200 basic_block old_entry_block, bb;
6201 auto_vec<gimple *, 10> init_stmts;
6202 tree vars = NULL_TREE;
6204 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6205 && TREE_CODE (new_decl) == FUNCTION_DECL);
6206 DECL_POSSIBLY_INLINED (old_decl) = 1;
6208 old_version_node = cgraph_node::get (old_decl);
6209 gcc_checking_assert (old_version_node);
6210 new_version_node = cgraph_node::get (new_decl);
6211 gcc_checking_assert (new_version_node);
6213 /* Copy over debug args. */
6214 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6216 vec<tree, va_gc> **new_debug_args, **old_debug_args;
6217 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6218 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6219 old_debug_args = decl_debug_args_lookup (old_decl);
6220 if (old_debug_args)
6222 new_debug_args = decl_debug_args_insert (new_decl);
6223 *new_debug_args = vec_safe_copy (*old_debug_args);
6227 /* Output the inlining info for this abstract function, since it has been
6228 inlined. If we don't do this now, we can lose the information about the
6229 variables in the function when the blocks get blown away as soon as we
6230 remove the cgraph node. */
6231 (*debug_hooks->outlining_inline_function) (old_decl);
6233 DECL_ARTIFICIAL (new_decl) = 1;
6234 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6235 if (DECL_ORIGIN (old_decl) == old_decl)
6236 old_version_node->used_as_abstract_origin = true;
6237 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6239 /* Prepare the data structures for the tree copy. */
6240 memset (&id, 0, sizeof (id));
6242 /* Collect statements to be folded once the body has been copied. */
6243 id.statements_to_fold = new hash_set<gimple *>;
6245 id.decl_map = new hash_map<tree, tree>;
6246 id.debug_map = NULL;
6247 id.src_fn = old_decl;
6248 id.dst_fn = new_decl;
6249 id.src_node = old_version_node;
6250 id.dst_node = new_version_node;
6251 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6252 id.blocks_to_copy = blocks_to_copy;
6254 id.copy_decl = copy_decl_no_change;
6255 id.transform_call_graph_edges
6256 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6257 id.transform_new_cfg = true;
6258 id.transform_return_to_modify = false;
6259 id.transform_parameter = false;
6260 id.transform_lang_insert_block = NULL;
6262 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
6263 (DECL_STRUCT_FUNCTION (old_decl));
6264 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6265 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6266 initialize_cfun (new_decl, old_decl,
6267 new_entry ? new_entry->count : old_entry_block->count);
6268 new_version_node->calls_declare_variant_alt
6269 = old_version_node->calls_declare_variant_alt;
6270 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6271 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6272 = id.src_cfun->gimple_df->ipa_pta;
6274 /* Copy the function's static chain. */
6275 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6276 if (p)
6277 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6278 = copy_static_chain (p, &id);
6280 auto_vec<int, 16> new_param_indices;
6281 clone_info *info = clone_info::get (old_version_node);
6282 ipa_param_adjustments *old_param_adjustments
6283 = info ? info->param_adjustments : NULL;
6284 if (old_param_adjustments)
6285 old_param_adjustments->get_updated_indices (&new_param_indices);
6287 /* If there's a tree_map, prepare for substitution. */
6288 if (tree_map)
6289 for (i = 0; i < tree_map->length (); i++)
6291 gimple *init;
6292 replace_info = (*tree_map)[i];
6294 int p = replace_info->parm_num;
6295 if (old_param_adjustments)
6296 p = new_param_indices[p];
6298 tree parm;
6299 for (parm = DECL_ARGUMENTS (old_decl); p;
6300 parm = DECL_CHAIN (parm))
6301 p--;
6302 gcc_assert (parm);
6303 init = setup_one_parameter (&id, parm, replace_info->new_tree,
6304 id.src_fn, NULL, &vars);
6305 if (init)
6306 init_stmts.safe_push (init);
6309 ipa_param_body_adjustments *param_body_adjs = NULL;
6310 if (param_adjustments)
6312 param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6313 new_decl, old_decl,
6314 &id, &vars, tree_map);
6315 id.param_body_adjs = param_body_adjs;
6316 DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6318 else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6319 DECL_ARGUMENTS (new_decl)
6320 = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6322 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6323 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6325 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6327 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6328 /* Add local vars. */
6329 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6331 if (DECL_RESULT (old_decl) == NULL_TREE)
6333 else if (param_adjustments && param_adjustments->m_skip_return
6334 && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6336 tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6337 &id);
6338 declare_inline_vars (NULL, resdecl_repl);
6339 insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6341 DECL_RESULT (new_decl)
6342 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6343 RESULT_DECL, NULL_TREE, void_type_node);
6344 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6345 DECL_IS_MALLOC (new_decl) = false;
6346 cfun->returns_struct = 0;
6347 cfun->returns_pcc_struct = 0;
6349 else
6351 tree old_name;
6352 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6353 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6354 if (gimple_in_ssa_p (id.src_cfun)
6355 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6356 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6358 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6359 insert_decl_map (&id, old_name, new_name);
6360 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6361 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6365 /* Set up the destination function's loop tree. */
6366 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6368 cfun->curr_properties &= ~PROP_loops;
6369 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6370 cfun->curr_properties |= PROP_loops;
6373 /* Copy the Function's body. */
6374 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6375 new_entry);
6377 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6378 number_blocks (new_decl);
6380 /* We want to create the BB unconditionally, so that the addition of
6381 debug stmts doesn't affect BB count, which may in the end cause
6382 codegen differences. */
6383 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6384 while (init_stmts.length ())
6385 insert_init_stmt (&id, bb, init_stmts.pop ());
6386 update_clone_info (&id);
6388 /* Remap the nonlocal_goto_save_area, if any. */
6389 if (cfun->nonlocal_goto_save_area)
6391 struct walk_stmt_info wi;
6393 memset (&wi, 0, sizeof (wi));
6394 wi.info = &id;
6395 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6398 /* Clean up. */
6399 delete id.decl_map;
6400 if (id.debug_map)
6401 delete id.debug_map;
6402 free_dominance_info (CDI_DOMINATORS);
6403 free_dominance_info (CDI_POST_DOMINATORS);
6405 update_max_bb_count ();
6406 fold_marked_statements (0, id.statements_to_fold);
6407 delete id.statements_to_fold;
6408 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6409 if (id.dst_node->definition)
6410 cgraph_edge::rebuild_references ();
6411 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6413 calculate_dominance_info (CDI_DOMINATORS);
6414 fix_loop_structure (NULL);
6416 update_ssa (TODO_update_ssa);
6418 /* After partial cloning we need to rescale frequencies, so that they are
6419 within the proper range in the cloned function. */
6420 if (new_entry)
6422 struct cgraph_edge *e;
6423 rebuild_frequencies ();
6425 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6426 for (e = new_version_node->callees; e; e = e->next_callee)
6428 basic_block bb = gimple_bb (e->call_stmt);
6429 e->count = bb->count;
6431 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6433 basic_block bb = gimple_bb (e->call_stmt);
6434 e->count = bb->count;
6438 if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6440 vec<tree, va_gc> **debug_args = NULL;
6441 unsigned int len = 0;
6442 unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6444 for (i = 0; i < reset_len; i++)
6446 tree parm = param_body_adjs->m_reset_debug_decls[i];
6447 gcc_assert (is_gimple_reg (parm));
6448 tree ddecl;
6450 if (debug_args == NULL)
6452 debug_args = decl_debug_args_insert (new_decl);
6453 len = vec_safe_length (*debug_args);
6455 ddecl = make_node (DEBUG_EXPR_DECL);
6456 DECL_ARTIFICIAL (ddecl) = 1;
6457 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6458 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6459 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6460 vec_safe_push (*debug_args, ddecl);
6462 if (debug_args != NULL)
6464 /* On the callee side, add
6465 DEBUG D#Y s=> parm
6466 DEBUG var => D#Y
6467 stmts to the first bb, where var is a VAR_DECL created for the
6468 optimized-away parameter in the DECL_INITIAL block. This hints
6469 in the debug info that var (whose DECL_ORIGIN is the parm
6470 PARM_DECL) is optimized away, but could be looked up at the
6471 call site as the value of D#X there. */
6472 tree vexpr;
6473 gimple_stmt_iterator cgsi
6474 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6475 gimple *def_temp;
6476 tree var = vars;
6477 i = vec_safe_length (*debug_args);
6480 i -= 2;
6481 while (var != NULL_TREE
6482 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6483 var = TREE_CHAIN (var);
6484 if (var == NULL_TREE)
6485 break;
6486 vexpr = make_node (DEBUG_EXPR_DECL);
6487 tree parm = (**debug_args)[i];
6488 DECL_ARTIFICIAL (vexpr) = 1;
6489 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6490 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6491 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6492 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6493 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6494 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6496 while (i > len);
6499 delete param_body_adjs;
6500 free_dominance_info (CDI_DOMINATORS);
6501 free_dominance_info (CDI_POST_DOMINATORS);
6503 gcc_assert (!id.debug_stmts.exists ());
6504 pop_cfun ();
6505 return;
6508 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6509 the callee and return the inlined body on success. */
6511 tree
6512 maybe_inline_call_in_expr (tree exp)
6514 tree fn = get_callee_fndecl (exp);
6516 /* We can only try to inline "const" functions. */
6517 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6519 call_expr_arg_iterator iter;
6520 copy_body_data id;
6521 tree param, arg, t;
6522 hash_map<tree, tree> decl_map;
6524 /* Remap the parameters. */
6525 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6526 param;
6527 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6528 decl_map.put (param, arg);
6530 memset (&id, 0, sizeof (id));
6531 id.src_fn = fn;
6532 id.dst_fn = current_function_decl;
6533 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6534 id.decl_map = &decl_map;
6536 id.copy_decl = copy_decl_no_change;
6537 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6538 id.transform_new_cfg = false;
6539 id.transform_return_to_modify = true;
6540 id.transform_parameter = true;
6541 id.transform_lang_insert_block = NULL;
6543 /* Make sure not to unshare trees behind the front-end's back
6544 since front-end specific mechanisms may rely on sharing. */
6545 id.regimplify = false;
6546 id.do_not_unshare = true;
6548 /* We're not inside any EH region. */
6549 id.eh_lp_nr = 0;
6551 t = copy_tree_body (&id);
6553 /* We can only return something suitable for use in a GENERIC
6554 expression tree. */
6555 if (TREE_CODE (t) == MODIFY_EXPR)
6556 return TREE_OPERAND (t, 1);
6559 return NULL_TREE;
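/* Illustrative sketch (not compiled): a front end could use the routine
   above to fold a call to a "const" function in GENERIC, falling back to the
   original CALL_EXPR when inlining is not possible.  The helper name is
   hypothetical.  */
#if 0
static tree
example_fold_const_call (tree call)
{
  tree folded = maybe_inline_call_in_expr (call);
  return folded ? folded : call;
}
#endif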
6562 /* Duplicate a type, fields and all. */
6564 tree
6565 build_duplicate_type (tree type)
6567 struct copy_body_data id;
6569 memset (&id, 0, sizeof (id));
6570 id.src_fn = current_function_decl;
6571 id.dst_fn = current_function_decl;
6572 id.src_cfun = cfun;
6573 id.decl_map = new hash_map<tree, tree>;
6574 id.debug_map = NULL;
6575 id.copy_decl = copy_decl_no_change;
6577 type = remap_type_1 (type, &id);
6579 delete id.decl_map;
6580 if (id.debug_map)
6581 delete id.debug_map;
6583 TYPE_CANONICAL (type) = type;
6585 return type;
6588 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6589 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6590 evaluation. */
6592 tree
6593 copy_fn (tree fn, tree& parms, tree& result)
6595 copy_body_data id;
6596 tree param;
6597 hash_map<tree, tree> decl_map;
6599 tree *p = &parms;
6600 *p = NULL_TREE;
6602 memset (&id, 0, sizeof (id));
6603 id.src_fn = fn;
6604 id.dst_fn = current_function_decl;
6605 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6606 id.decl_map = &decl_map;
6608 id.copy_decl = copy_decl_no_change;
6609 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6610 id.transform_new_cfg = false;
6611 id.transform_return_to_modify = false;
6612 id.transform_parameter = true;
6613 id.transform_lang_insert_block = NULL;
6615 /* Make sure not to unshare trees behind the front-end's back
6616 since front-end specific mechanisms may rely on sharing. */
6617 id.regimplify = false;
6618 id.do_not_unshare = true;
6619 id.do_not_fold = true;
6621 /* We're not inside any EH region. */
6622 id.eh_lp_nr = 0;
6624 /* Remap the parameters and result and return them to the caller. */
6625 for (param = DECL_ARGUMENTS (fn);
6626 param;
6627 param = DECL_CHAIN (param))
6629 *p = remap_decl (param, &id);
6630 p = &DECL_CHAIN (*p);
6633 if (DECL_RESULT (fn))
6634 result = remap_decl (DECL_RESULT (fn), &id);
6635 else
6636 result = NULL_TREE;
6638 return copy_tree_body (&id);
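/* Illustrative sketch (not compiled): the C++ constexpr evaluator uses
   copy_fn roughly like this, binding arguments to the remapped PARMS and
   reading the result through the remapped RESULT.  The helper name is
   hypothetical.  */
#if 0
static tree
example_get_constexpr_body (tree fndecl, tree *parms_out, tree *result_out)
{
  tree parms, result;
  tree body = copy_fn (fndecl, parms, result);
  *parms_out = parms;
  *result_out = result;
  return body;
}
#endif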