1 /* Tree inlining.
2 Copyright (C) 2001-2020 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "stringpool.h"
60 #include "attribs.h"
61 #include "sreal.h"
62 #include "tree-cfgcleanup.h"
63 #include "tree-ssa-live.h"
64 #include "alloc-pool.h"
65 #include "symbol-summary.h"
66 #include "symtab-thunks.h"
68 /* I'm not really happy about this, but we need to handle gimple and
69 non-gimple trees. */
71 /* Inlining, Cloning, Versioning, Parallelization
73 Inlining: a function body is duplicated, but the PARM_DECLs are
74 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
75 MODIFY_EXPRs that store to a dedicated returned-value variable.
76 The duplicated eh_region info of the copy will later be appended
77 to the info for the caller; the eh_region info in copied throwing
78 statements and RESX statements are adjusted accordingly.
80 Cloning: (only in C++) We have one body for a con/de/structor, and
81 multiple function decls, each with a unique parameter list.
82 Duplicate the body, using the given splay tree; some parameters
83 will become constants (like 0 or 1).
85 Versioning: a function body is duplicated and the result is a new
86 function, rather than being copied into blocks of an existing function
87 as with inlining. Some parameters will become constants.
89 Parallelization: a region of a function is duplicated resulting in
90 a new function. Variables may be replaced with complex expressions
91 to enable shared variable semantics.
93 All of these will simultaneously look up any callgraph edges. If
94 we're going to inline the duplicated function body, and the given
95 function has some cloned callgraph nodes (one for each place this
96 function will be inlined) those callgraph edges will be duplicated.
97 If we're cloning the body, those callgraph edges will be
98 updated to point into the new body. (Note that the original
99 callgraph node and edge list will not be altered.)
101 See the CALL_EXPR handling case in copy_tree_body_r (). */
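/* As a purely illustrative sketch (names and numbering are hypothetical):
   inlining

     int sq (int x) { return x * x; }
     ...
     y = sq (a);

   duplicates the body of sq, remaps the PARM_DECL 'x' to a fresh local
   VAR_DECL initialized from 'a', and turns the RETURN_EXPR into an
   assignment to a dedicated return-value variable that finally feeds 'y':

     x.1 = a;
     retval.2 = x.1 * x.1;
     y = retval.2;  */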
103 /* To Do:
105 o In order to make inlining-on-trees work, we pessimized
106 function-local static constants. In particular, they are now
107 always output, even when not addressed. Fix this by treating
108 function-local static constants just like global static
109 constants; the back-end already knows not to output them if they
110 are not needed.
112 o Provide heuristics to clamp inlining of recursive template
113 calls? */
116 /* Weights that estimate_num_insns uses to estimate the size of the
117 produced code. */
119 eni_weights eni_size_weights;
121 /* Weights that estimate_num_insns uses to estimate the time necessary
122 to execute the produced code. */
124 eni_weights eni_time_weights;
126 /* Prototypes. */
128 static tree declare_return_variable (copy_body_data *, tree, tree,
129 basic_block);
130 static void remap_block (tree *, copy_body_data *);
131 static void copy_bind_expr (tree *, int *, copy_body_data *);
132 static void declare_inline_vars (tree, tree);
133 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
134 static void prepend_lexical_block (tree current_block, tree new_block);
135 static tree copy_result_decl_to_var (tree, copy_body_data *);
136 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
137 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
138 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
140 /* Insert a tree->tree mapping for ID. Although the name suggests
141 that the trees should be variables, it is used for more than that. */
143 void
144 insert_decl_map (copy_body_data *id, tree key, tree value)
146 id->decl_map->put (key, value);
148 /* Always insert an identity map as well. If we see this same new
149 node again, we won't want to duplicate it a second time. */
150 if (key != value)
151 id->decl_map->put (value, value);
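/* For example (hypothetical names): remapping a parameter OLD_PARM to a
   freshly created NEW_VAR with

     insert_decl_map (id, old_parm, new_var);

   records both old_parm -> new_var and new_var -> new_var in ID->decl_map,
   so that if the walk over the copied body meets NEW_VAR again it maps to
   itself instead of being duplicated a second time.  */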
154 /* Insert a tree->tree mapping for ID. This is only used for
155 variables. */
157 static void
158 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
160 if (!gimple_in_ssa_p (id->src_cfun))
161 return;
163 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
164 return;
166 if (!target_for_debug_bind (key))
167 return;
169 gcc_assert (TREE_CODE (key) == PARM_DECL);
170 gcc_assert (VAR_P (value));
172 if (!id->debug_map)
173 id->debug_map = new hash_map<tree, tree>;
175 id->debug_map->put (key, value);
178 /* If nonzero, we're remapping the contents of inlined debug
179 statements. If negative, an error has occurred, such as a
180 reference to a variable that isn't available in the inlined
181 context. */
182 static int processing_debug_stmt = 0;
184 /* Construct new SSA name for old NAME. ID is the inline context. */
186 static tree
187 remap_ssa_name (tree name, copy_body_data *id)
189 tree new_tree, var;
190 tree *n;
192 gcc_assert (TREE_CODE (name) == SSA_NAME);
194 n = id->decl_map->get (name);
195 if (n)
197 /* When we perform edge redirection as part of CFG copy, IPA-SRA can
198 remove an unused LHS from a call statement. Such an LHS can, however,
199 still appear in debug statements, but its value is lost in this
200 function and we do not want to map it. */
201 if (id->killed_new_ssa_names
202 && id->killed_new_ssa_names->contains (*n))
204 gcc_assert (processing_debug_stmt);
205 processing_debug_stmt = -1;
206 return name;
209 return unshare_expr (*n);
212 if (processing_debug_stmt)
214 if (SSA_NAME_IS_DEFAULT_DEF (name)
215 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
216 && id->entry_bb == NULL
217 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
219 tree vexpr = make_node (DEBUG_EXPR_DECL);
220 gimple *def_temp;
221 gimple_stmt_iterator gsi;
222 tree val = SSA_NAME_VAR (name);
224 n = id->decl_map->get (val);
225 if (n != NULL)
226 val = *n;
227 if (TREE_CODE (val) != PARM_DECL
228 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
230 processing_debug_stmt = -1;
231 return name;
233 n = id->decl_map->get (val);
234 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
235 return *n;
236 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
237 DECL_ARTIFICIAL (vexpr) = 1;
238 TREE_TYPE (vexpr) = TREE_TYPE (name);
239 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
240 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
241 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
242 insert_decl_map (id, val, vexpr);
243 return vexpr;
246 processing_debug_stmt = -1;
247 return name;
250 /* Remap anonymous SSA names or SSA names of anonymous decls. */
251 var = SSA_NAME_VAR (name);
252 if (!var
253 || (!SSA_NAME_IS_DEFAULT_DEF (name)
254 && VAR_P (var)
255 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
256 && DECL_ARTIFICIAL (var)
257 && DECL_IGNORED_P (var)
258 && !DECL_NAME (var)))
260 struct ptr_info_def *pi;
261 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
262 if (!var && SSA_NAME_IDENTIFIER (name))
263 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
264 insert_decl_map (id, name, new_tree);
265 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
266 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
267 /* At least IPA points-to info can be directly transferred. */
268 if (id->src_cfun->gimple_df
269 && id->src_cfun->gimple_df->ipa_pta
270 && POINTER_TYPE_P (TREE_TYPE (name))
271 && (pi = SSA_NAME_PTR_INFO (name))
272 && !pi->pt.anything)
274 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
275 new_pi->pt = pi->pt;
277 /* So can range-info. */
278 if (!POINTER_TYPE_P (TREE_TYPE (name))
279 && SSA_NAME_RANGE_INFO (name))
280 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
281 SSA_NAME_RANGE_INFO (name));
282 return new_tree;
285 /* Do not set DEF_STMT yet as the statement is not copied yet. We do that
286 in copy_bb. */
287 new_tree = remap_decl (var, id);
289 /* We might've substituted a constant or another SSA_NAME for
290 the variable.
292 Replace the SSA name representing the RESULT_DECL by the variable during
293 inlining: this saves us from the need to introduce a PHI node in case the
294 return value is only partly initialized. */
295 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
296 && (!SSA_NAME_VAR (name)
297 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
298 || !id->transform_return_to_modify))
300 struct ptr_info_def *pi;
301 new_tree = make_ssa_name (new_tree);
302 insert_decl_map (id, name, new_tree);
303 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
304 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
305 /* At least IPA points-to info can be directly transferred. */
306 if (id->src_cfun->gimple_df
307 && id->src_cfun->gimple_df->ipa_pta
308 && POINTER_TYPE_P (TREE_TYPE (name))
309 && (pi = SSA_NAME_PTR_INFO (name))
310 && !pi->pt.anything)
312 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
313 new_pi->pt = pi->pt;
315 /* So can range-info. */
316 if (!POINTER_TYPE_P (TREE_TYPE (name))
317 && SSA_NAME_RANGE_INFO (name))
318 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
319 SSA_NAME_RANGE_INFO (name));
320 if (SSA_NAME_IS_DEFAULT_DEF (name))
322 /* By inlining a function that has an uninitialized variable, we might
323 extend its lifetime (the variable might get reused). This causes an
324 ICE when we end up extending the lifetime of an SSA name across an
325 abnormal edge, and it also increases register pressure.
327 We simply initialize all uninitialized vars to 0, except
328 when we are inlining into the very first BB. We could avoid
329 this for all BBs that are not inside strongly connected
330 regions of the CFG, but this is expensive to test. */
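/* Hypothetical example: when inlining

     int f (void) { int x; return x; }

   the default definition of the SSA name for 'x' would otherwise have its
   lifetime extended into the caller.  When the name occurs in an abnormal
   PHI and we are not inlining into the very first BB, the code below emits

     x_N = 0;

   at the end of ID->entry_bb instead of keeping it a default definition.  */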
331 if (id->entry_bb
332 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
333 && (!SSA_NAME_VAR (name)
334 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
335 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
336 0)->dest
337 || EDGE_COUNT (id->entry_bb->preds) != 1))
339 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
340 gimple *init_stmt;
341 tree zero = build_zero_cst (TREE_TYPE (new_tree));
343 init_stmt = gimple_build_assign (new_tree, zero);
344 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
345 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
347 else
349 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
350 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
354 else
355 insert_decl_map (id, name, new_tree);
356 return new_tree;
359 /* Remap DECL during the copying of the BLOCK tree for the function. */
361 tree
362 remap_decl (tree decl, copy_body_data *id)
364 tree *n;
366 /* We only remap local variables in the current function. */
368 /* See if we have remapped this declaration. */
370 n = id->decl_map->get (decl);
372 if (!n && processing_debug_stmt)
374 processing_debug_stmt = -1;
375 return decl;
378 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
379 necessary DECLs have already been remapped and we do not want to duplicate
380 a decl coming from outside of the sequence we are copying. */
381 if (!n
382 && id->prevent_decl_creation_for_types
383 && id->remapping_type_depth > 0
384 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
385 return decl;
387 /* If we didn't already have an equivalent for this declaration, create one
388 now. */
389 if (!n)
391 /* Make a copy of the variable or label. */
392 tree t = id->copy_decl (decl, id);
394 /* Remember it, so that if we encounter this local entity again
395 we can reuse this copy. Do this early because remap_type may
396 need this decl for TYPE_STUB_DECL. */
397 insert_decl_map (id, decl, t);
399 if (!DECL_P (t))
400 return t;
402 /* Remap types, if necessary. */
403 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
404 if (TREE_CODE (t) == TYPE_DECL)
406 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
408 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
409 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
410 is not set on the TYPE_DECL, for example in LTO mode. */
411 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
413 tree x = build_variant_type_copy (TREE_TYPE (t));
414 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
415 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
416 DECL_ORIGINAL_TYPE (t) = x;
420 /* Remap sizes as necessary. */
421 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
422 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
424 /* If fields, do likewise for offset and qualifier. */
425 if (TREE_CODE (t) == FIELD_DECL)
427 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
428 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
429 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
432 return t;
435 if (id->do_not_unshare)
436 return *n;
437 else
438 return unshare_expr (*n);
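/* Illustrative use (hypothetical caller): remapping the same local twice
   yields the same copy, because the first call recorded the mapping via
   insert_decl_map above:

     tree c1 = remap_decl (local_var, id);
     tree c2 = remap_decl (local_var, id);

   The second call simply finds the entry in ID->decl_map and returns it.  */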
441 static tree
442 remap_type_1 (tree type, copy_body_data *id)
444 tree new_tree, t;
446 /* We do need a copy. Build and register it now. If this is a pointer or
447 reference type, remap the designated type and make a new pointer or
448 reference type. */
449 if (TREE_CODE (type) == POINTER_TYPE)
451 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
452 TYPE_MODE (type),
453 TYPE_REF_CAN_ALIAS_ALL (type));
454 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
455 new_tree = build_type_attribute_qual_variant (new_tree,
456 TYPE_ATTRIBUTES (type),
457 TYPE_QUALS (type));
458 insert_decl_map (id, type, new_tree);
459 return new_tree;
461 else if (TREE_CODE (type) == REFERENCE_TYPE)
463 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
464 TYPE_MODE (type),
465 TYPE_REF_CAN_ALIAS_ALL (type));
466 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
467 new_tree = build_type_attribute_qual_variant (new_tree,
468 TYPE_ATTRIBUTES (type),
469 TYPE_QUALS (type));
470 insert_decl_map (id, type, new_tree);
471 return new_tree;
473 else
474 new_tree = copy_node (type);
476 insert_decl_map (id, type, new_tree);
478 /* This is a new type, not a copy of an old type. Need to reassociate
479 variants. We can handle everything except the main variant lazily. */
480 t = TYPE_MAIN_VARIANT (type);
481 if (type != t)
483 t = remap_type (t, id);
484 TYPE_MAIN_VARIANT (new_tree) = t;
485 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
486 TYPE_NEXT_VARIANT (t) = new_tree;
488 else
490 TYPE_MAIN_VARIANT (new_tree) = new_tree;
491 TYPE_NEXT_VARIANT (new_tree) = NULL;
494 if (TYPE_STUB_DECL (type))
495 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
497 /* Lazily create pointer and reference types. */
498 TYPE_POINTER_TO (new_tree) = NULL;
499 TYPE_REFERENCE_TO (new_tree) = NULL;
501 /* Copy all types that may contain references to local variables; be sure to
502 preserve sharing in between type and its main variant when possible. */
503 switch (TREE_CODE (new_tree))
505 case INTEGER_TYPE:
506 case REAL_TYPE:
507 case FIXED_POINT_TYPE:
508 case ENUMERAL_TYPE:
509 case BOOLEAN_TYPE:
510 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
512 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
513 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
515 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
516 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
518 else
520 t = TYPE_MIN_VALUE (new_tree);
521 if (t && TREE_CODE (t) != INTEGER_CST)
522 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
524 t = TYPE_MAX_VALUE (new_tree);
525 if (t && TREE_CODE (t) != INTEGER_CST)
526 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
528 return new_tree;
530 case FUNCTION_TYPE:
531 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
532 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
533 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
534 else
535 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
536 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
537 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
538 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
539 else
540 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
541 return new_tree;
543 case ARRAY_TYPE:
544 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
545 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
546 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
547 else
548 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
550 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
552 gcc_checking_assert (TYPE_DOMAIN (type)
553 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
554 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
556 else
558 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
559 /* For array bounds where we have decided not to copy over the bounds
560 variable that isn't used in the OpenMP/OpenACC region, change them to
561 an uninitialized VAR_DECL temporary. */
562 if (id->adjust_array_error_bounds
563 && TYPE_DOMAIN (new_tree)
564 && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
565 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
567 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
568 DECL_ATTRIBUTES (v)
569 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
570 DECL_ATTRIBUTES (v));
571 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
574 break;
576 case RECORD_TYPE:
577 case UNION_TYPE:
578 case QUAL_UNION_TYPE:
579 if (TYPE_MAIN_VARIANT (type) != type
580 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
581 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
582 else
584 tree f, nf = NULL;
586 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
588 t = remap_decl (f, id);
589 DECL_CONTEXT (t) = new_tree;
590 DECL_CHAIN (t) = nf;
591 nf = t;
593 TYPE_FIELDS (new_tree) = nreverse (nf);
595 break;
597 case OFFSET_TYPE:
598 default:
599 /* Shouldn't have been thought variable sized. */
600 gcc_unreachable ();
603 /* All variants of the type share the same size, so use the already remapped data. */
604 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
606 tree s = TYPE_SIZE (type);
607 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
608 tree su = TYPE_SIZE_UNIT (type);
609 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
610 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
611 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
612 || s == mvs);
613 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
614 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
615 || su == mvsu);
616 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
617 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
619 else
621 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
622 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
625 return new_tree;
628 /* Helper function for remap_type_2, called through walk_tree. */
630 static tree
631 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
633 copy_body_data *id = (copy_body_data *) data;
635 if (TYPE_P (*tp))
636 *walk_subtrees = 0;
638 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
639 return *tp;
641 return NULL_TREE;
644 /* Return true if TYPE needs to be remapped because remap_decl on any
645 needed embedded decl returns something other than that decl. */
647 static bool
648 remap_type_2 (tree type, copy_body_data *id)
650 tree t;
652 #define RETURN_TRUE_IF_VAR(T) \
653 do \
655 tree _t = (T); \
656 if (_t) \
658 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
659 return true; \
660 if (!TYPE_SIZES_GIMPLIFIED (type) \
661 && walk_tree (&_t, remap_type_3, id, NULL)) \
662 return true; \
665 while (0)
667 switch (TREE_CODE (type))
669 case POINTER_TYPE:
670 case REFERENCE_TYPE:
671 case FUNCTION_TYPE:
672 case METHOD_TYPE:
673 return remap_type_2 (TREE_TYPE (type), id);
675 case INTEGER_TYPE:
676 case REAL_TYPE:
677 case FIXED_POINT_TYPE:
678 case ENUMERAL_TYPE:
679 case BOOLEAN_TYPE:
680 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
681 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
682 return false;
684 case ARRAY_TYPE:
685 if (remap_type_2 (TREE_TYPE (type), id)
686 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
687 return true;
688 break;
690 case RECORD_TYPE:
691 case UNION_TYPE:
692 case QUAL_UNION_TYPE:
693 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
694 if (TREE_CODE (t) == FIELD_DECL)
696 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
697 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
698 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
699 if (TREE_CODE (type) == QUAL_UNION_TYPE)
700 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
702 break;
704 default:
705 return false;
708 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
709 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
710 return false;
711 #undef RETURN_TRUE_IF_VAR
714 tree
715 remap_type (tree type, copy_body_data *id)
717 tree *node;
718 tree tmp;
720 if (type == NULL)
721 return type;
723 /* See if we have remapped this type. */
724 node = id->decl_map->get (type);
725 if (node)
726 return *node;
728 /* The type only needs remapping if it's variably modified. */
729 if (! variably_modified_type_p (type, id->src_fn)
730 /* Don't remap if copy_decl method doesn't always return a new
731 decl and for all embedded decls returns the passed in decl. */
732 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
734 insert_decl_map (id, type, type);
735 return type;
738 id->remapping_type_depth++;
739 tmp = remap_type_1 (type, id);
740 id->remapping_type_depth--;
742 return tmp;
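/* Sketch of when remapping is needed (illustrative): for a variably
   modified type such as the type of

     char buf[n];      (where 'n' is a PARM_DECL of the source function)

   the array domain refers to 'n', which itself gets remapped, so the type
   must be copied as well.  A type like 'int' or 'char[10]' is not variably
   modified and is simply mapped to itself above.  */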
745 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
747 static bool
748 can_be_nonlocal (tree decl, copy_body_data *id)
750 /* We cannot duplicate function decls. */
751 if (TREE_CODE (decl) == FUNCTION_DECL)
752 return true;
754 /* Local static vars must be non-local or we get multiple declaration
755 problems. */
756 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
757 return true;
759 return false;
762 static tree
763 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
764 copy_body_data *id)
766 tree old_var;
767 tree new_decls = NULL_TREE;
769 /* Remap its variables. */
770 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
772 tree new_var;
774 if (can_be_nonlocal (old_var, id))
776 /* We need to add this variable to the local decls as otherwise
777 nothing else will do so. */
778 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
779 add_local_decl (cfun, old_var);
780 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
781 && !DECL_IGNORED_P (old_var)
782 && nonlocalized_list)
783 vec_safe_push (*nonlocalized_list, old_var);
784 continue;
787 /* Remap the variable. */
788 new_var = remap_decl (old_var, id);
790 /* If we didn't remap this variable, we can't mess with its
791 TREE_CHAIN. If we remapped this variable to the return slot, it's
792 already declared somewhere else, so don't declare it here. */
794 if (new_var == id->retvar)
796 else if (!new_var)
798 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
799 && !DECL_IGNORED_P (old_var)
800 && nonlocalized_list)
801 vec_safe_push (*nonlocalized_list, old_var);
803 else
805 gcc_assert (DECL_P (new_var));
806 DECL_CHAIN (new_var) = new_decls;
807 new_decls = new_var;
809 /* Also copy value-expressions. */
810 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
812 tree tem = DECL_VALUE_EXPR (new_var);
813 bool old_regimplify = id->regimplify;
814 id->remapping_type_depth++;
815 walk_tree (&tem, copy_tree_body_r, id, NULL);
816 id->remapping_type_depth--;
817 id->regimplify = old_regimplify;
818 SET_DECL_VALUE_EXPR (new_var, tem);
823 return nreverse (new_decls);
826 /* Copy the BLOCK to contain remapped versions of the variables
827 therein. And hook the new block into the block-tree. */
829 static void
830 remap_block (tree *block, copy_body_data *id)
832 tree old_block;
833 tree new_block;
835 /* Make the new block. */
836 old_block = *block;
837 new_block = make_node (BLOCK);
838 TREE_USED (new_block) = TREE_USED (old_block);
839 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
840 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
841 BLOCK_NONLOCALIZED_VARS (new_block)
842 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
843 *block = new_block;
845 /* Remap its variables. */
846 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
847 &BLOCK_NONLOCALIZED_VARS (new_block),
848 id);
850 if (id->transform_lang_insert_block)
851 id->transform_lang_insert_block (new_block);
853 /* Remember the remapped block. */
854 insert_decl_map (id, old_block, new_block);
857 /* Copy the whole block tree and root it in id->block. */
859 static tree
860 remap_blocks (tree block, copy_body_data *id)
862 tree t;
863 tree new_tree = block;
865 if (!block)
866 return NULL;
868 remap_block (&new_tree, id);
869 gcc_assert (new_tree != block);
870 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
871 prepend_lexical_block (new_tree, remap_blocks (t, id));
872 /* Blocks are in arbitrary order, but to make things slightly prettier, do
873 not swap their order when producing a copy. */
874 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
875 return new_tree;
878 /* Remap the block tree rooted at BLOCK to nothing. */
880 static void
881 remap_blocks_to_null (tree block, copy_body_data *id)
883 tree t;
884 insert_decl_map (id, block, NULL_TREE);
885 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
886 remap_blocks_to_null (t, id);
889 /* Remap the location info pointed to by LOCUS. */
891 static location_t
892 remap_location (location_t locus, copy_body_data *id)
894 if (LOCATION_BLOCK (locus))
896 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
897 gcc_assert (n);
898 if (*n)
899 return set_block (locus, *n);
902 locus = LOCATION_LOCUS (locus);
904 if (locus != UNKNOWN_LOCATION && id->block)
905 return set_block (locus, id->block);
907 return locus;
910 static void
911 copy_statement_list (tree *tp)
913 tree_stmt_iterator oi, ni;
914 tree new_tree;
916 new_tree = alloc_stmt_list ();
917 ni = tsi_start (new_tree);
918 oi = tsi_start (*tp);
919 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
920 *tp = new_tree;
922 for (; !tsi_end_p (oi); tsi_next (&oi))
924 tree stmt = tsi_stmt (oi);
925 if (TREE_CODE (stmt) == STATEMENT_LIST)
926 /* This copy is not redundant; tsi_link_after will smash this
927 STATEMENT_LIST into the end of the one we're building, and we
928 don't want to do that with the original. */
929 copy_statement_list (&stmt);
930 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
934 static void
935 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
937 tree block = BIND_EXPR_BLOCK (*tp);
938 /* Copy (and replace) the statement. */
939 copy_tree_r (tp, walk_subtrees, NULL);
940 if (block)
942 remap_block (&block, id);
943 BIND_EXPR_BLOCK (*tp) = block;
946 if (BIND_EXPR_VARS (*tp))
947 /* This will remap a lot of the same decls again, but this should be
948 harmless. */
949 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
953 /* Create a new gimple_seq by remapping all the statements in BODY
954 using the inlining information in ID. */
956 static gimple_seq
957 remap_gimple_seq (gimple_seq body, copy_body_data *id)
959 gimple_stmt_iterator si;
960 gimple_seq new_body = NULL;
962 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
964 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
965 gimple_seq_add_seq (&new_body, new_stmts);
968 return new_body;
972 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
973 block using the mapping information in ID. */
975 static gimple *
976 copy_gimple_bind (gbind *stmt, copy_body_data *id)
978 gimple *new_bind;
979 tree new_block, new_vars;
980 gimple_seq body, new_body;
982 /* Copy the statement. Note that we purposely don't use copy_stmt
983 here because we need to remap statements as we copy. */
984 body = gimple_bind_body (stmt);
985 new_body = remap_gimple_seq (body, id);
987 new_block = gimple_bind_block (stmt);
988 if (new_block)
989 remap_block (&new_block, id);
991 /* This will remap a lot of the same decls again, but this should be
992 harmless. */
993 new_vars = gimple_bind_vars (stmt);
994 if (new_vars)
995 new_vars = remap_decls (new_vars, NULL, id);
997 new_bind = gimple_build_bind (new_vars, new_body, new_block);
999 return new_bind;
1002 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
1004 static bool
1005 is_parm (tree decl)
1007 if (TREE_CODE (decl) == SSA_NAME)
1009 decl = SSA_NAME_VAR (decl);
1010 if (!decl)
1011 return false;
1014 return (TREE_CODE (decl) == PARM_DECL);
1017 /* Remap the dependence CLIQUE from the source to the destination function
1018 as specified in ID. */
1020 static unsigned short
1021 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1023 if (clique == 0 || processing_debug_stmt)
1024 return 0;
1025 if (!id->dependence_map)
1026 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1027 bool existed;
1028 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1029 if (!existed)
1031 /* Clique 1 is reserved for local ones set by PTA. */
1032 if (cfun->last_clique == 0)
1033 cfun->last_clique = 1;
1034 newc = ++cfun->last_clique;
1036 return newc;
1039 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1040 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1041 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
1042 recursing into the children nodes of *TP. */
1044 static tree
1045 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1047 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1048 copy_body_data *id = (copy_body_data *) wi_p->info;
1049 tree fn = id->src_fn;
1051 /* For recursive invocations this is no longer the LHS itself. */
1052 bool is_lhs = wi_p->is_lhs;
1053 wi_p->is_lhs = false;
1055 if (TREE_CODE (*tp) == SSA_NAME)
1057 *tp = remap_ssa_name (*tp, id);
1058 *walk_subtrees = 0;
1059 if (is_lhs)
1060 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1061 return NULL;
1063 else if (auto_var_in_fn_p (*tp, fn))
1065 /* Local variables and labels need to be replaced by equivalent
1066 variables. We don't want to copy static variables; there's
1067 only one of those, no matter how many times we inline the
1068 containing function. Similarly for globals from an outer
1069 function. */
1070 tree new_decl;
1072 /* Remap the declaration. */
1073 new_decl = remap_decl (*tp, id);
1074 gcc_assert (new_decl);
1075 /* Replace this variable with the copy. */
1076 STRIP_TYPE_NOPS (new_decl);
1077 /* ??? The C++ frontend uses void * pointer zero to initialize
1078 any other type. This confuses the middle-end type verification.
1079 As cloned bodies do not go through gimplification again the fixup
1080 there doesn't trigger. */
1081 if (TREE_CODE (new_decl) == INTEGER_CST
1082 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1083 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1084 *tp = new_decl;
1085 *walk_subtrees = 0;
1087 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1088 gcc_unreachable ();
1089 else if (TREE_CODE (*tp) == SAVE_EXPR)
1090 gcc_unreachable ();
1091 else if (TREE_CODE (*tp) == LABEL_DECL
1092 && (!DECL_CONTEXT (*tp)
1093 || decl_function_context (*tp) == id->src_fn))
1094 /* These may need to be remapped for EH handling. */
1095 *tp = remap_decl (*tp, id);
1096 else if (TREE_CODE (*tp) == FIELD_DECL)
1098 /* If the enclosing record type is variably_modified_type_p, the field
1099 has already been remapped. Otherwise, it need not be. */
1100 tree *n = id->decl_map->get (*tp);
1101 if (n)
1102 *tp = *n;
1103 *walk_subtrees = 0;
1105 else if (TYPE_P (*tp))
1106 /* Types may need remapping as well. */
1107 *tp = remap_type (*tp, id);
1108 else if (CONSTANT_CLASS_P (*tp))
1110 /* If this is a constant, we have to copy the node iff the type
1111 will be remapped. copy_tree_r will not copy a constant. */
1112 tree new_type = remap_type (TREE_TYPE (*tp), id);
1114 if (new_type == TREE_TYPE (*tp))
1115 *walk_subtrees = 0;
1117 else if (TREE_CODE (*tp) == INTEGER_CST)
1118 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1119 else
1121 *tp = copy_node (*tp);
1122 TREE_TYPE (*tp) = new_type;
1125 else
1127 /* Otherwise, just copy the node. Note that copy_tree_r already
1128 knows not to copy VAR_DECLs, etc., so this is safe. */
1130 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1132 /* We need to re-canonicalize MEM_REFs from inline substitutions
1133 that can happen when a pointer argument is an ADDR_EXPR.
1134 Recurse here manually to allow that. */
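/* Illustrative case (assumed): if the caller passes '&a' for the pointer
   parameter 'p', a reference like MEM[p, 8] in the inlined body becomes
   MEM[&a, 8] after substitution; the fold_build2 call below re-canonicalizes
   it, possibly into a direct reference into 'a'.  */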
1135 tree ptr = TREE_OPERAND (*tp, 0);
1136 tree type = remap_type (TREE_TYPE (*tp), id);
1137 tree old = *tp;
1138 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1139 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1140 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1141 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1142 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1143 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1145 MR_DEPENDENCE_CLIQUE (*tp)
1146 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1147 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1149 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1150 remapped a parameter as the property might be valid only
1151 for the parameter itself. */
1152 if (TREE_THIS_NOTRAP (old)
1153 && (!is_parm (TREE_OPERAND (old, 0))
1154 || (!id->transform_parameter && is_parm (ptr))))
1155 TREE_THIS_NOTRAP (*tp) = 1;
1156 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1157 *walk_subtrees = 0;
1158 return NULL;
1161 /* Here is the "usual case". Copy this tree node, and then
1162 tweak some special cases. */
1163 copy_tree_r (tp, walk_subtrees, NULL);
1165 if (TREE_CODE (*tp) != OMP_CLAUSE)
1166 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1168 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1170 /* The copied TARGET_EXPR has never been expanded, even if the
1171 original node was expanded already. */
1172 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1173 TREE_OPERAND (*tp, 3) = NULL_TREE;
1175 else if (TREE_CODE (*tp) == ADDR_EXPR)
1177 /* Variable substitution need not be simple. In particular,
1178 the MEM_REF substitution above. Make sure that
1179 TREE_CONSTANT and friends are up-to-date. */
1180 int invariant = is_gimple_min_invariant (*tp);
1181 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1182 recompute_tree_invariant_for_addr_expr (*tp);
1184 /* If this used to be invariant, but is not any longer,
1185 then regimplification is probably needed. */
1186 if (invariant && !is_gimple_min_invariant (*tp))
1187 id->regimplify = true;
1189 *walk_subtrees = 0;
1193 /* Update the TREE_BLOCK for the cloned expr. */
1194 if (EXPR_P (*tp))
1196 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1197 tree old_block = TREE_BLOCK (*tp);
1198 if (old_block)
1200 tree *n;
1201 n = id->decl_map->get (TREE_BLOCK (*tp));
1202 if (n)
1203 new_block = *n;
1205 TREE_SET_BLOCK (*tp, new_block);
1208 /* Keep iterating. */
1209 return NULL_TREE;
1213 /* Called from copy_body_id via walk_tree. DATA is really a
1214 `copy_body_data *'. */
1216 tree
1217 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1219 copy_body_data *id = (copy_body_data *) data;
1220 tree fn = id->src_fn;
1221 tree new_block;
1223 /* Begin by recognizing trees that we'll completely rewrite for the
1224 inlining context. Our output for these trees is completely
1225 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1226 into an edge). Further down, we'll handle trees that get
1227 duplicated and/or tweaked. */
1229 /* When requested, RETURN_EXPRs should be transformed to just the
1230 contained MODIFY_EXPR. The branch semantics of the return will
1231 be handled elsewhere by manipulating the CFG rather than a statement. */
1232 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1234 tree assignment = TREE_OPERAND (*tp, 0);
1236 /* If we're returning something, just turn that into an
1237 assignment into the equivalent of the original RESULT_DECL.
1238 If the "assignment" is just the result decl, the result
1239 decl has already been set (e.g. a recent "foo (&result_decl,
1240 ...)"); just toss the entire RETURN_EXPR. */
1241 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1243 /* Replace the RETURN_EXPR with (a copy of) the
1244 MODIFY_EXPR hanging underneath. */
1245 *tp = copy_node (assignment);
1247 else /* Else the RETURN_EXPR returns no value. */
1249 *tp = NULL;
1250 return (tree) (void *)1;
1253 else if (TREE_CODE (*tp) == SSA_NAME)
1255 *tp = remap_ssa_name (*tp, id);
1256 *walk_subtrees = 0;
1257 return NULL;
1260 /* Local variables and labels need to be replaced by equivalent
1261 variables. We don't want to copy static variables; there's only
1262 one of those, no matter how many times we inline the containing
1263 function. Similarly for globals from an outer function. */
1264 else if (auto_var_in_fn_p (*tp, fn))
1266 tree new_decl;
1268 /* Remap the declaration. */
1269 new_decl = remap_decl (*tp, id);
1270 gcc_assert (new_decl);
1271 /* Replace this variable with the copy. */
1272 STRIP_TYPE_NOPS (new_decl);
1273 *tp = new_decl;
1274 *walk_subtrees = 0;
1276 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1277 copy_statement_list (tp);
1278 else if (TREE_CODE (*tp) == SAVE_EXPR
1279 || TREE_CODE (*tp) == TARGET_EXPR)
1280 remap_save_expr (tp, id->decl_map, walk_subtrees);
1281 else if (TREE_CODE (*tp) == LABEL_DECL
1282 && (! DECL_CONTEXT (*tp)
1283 || decl_function_context (*tp) == id->src_fn))
1284 /* These may need to be remapped for EH handling. */
1285 *tp = remap_decl (*tp, id);
1286 else if (TREE_CODE (*tp) == BIND_EXPR)
1287 copy_bind_expr (tp, walk_subtrees, id);
1288 /* Types may need remapping as well. */
1289 else if (TYPE_P (*tp))
1290 *tp = remap_type (*tp, id);
1292 /* If this is a constant, we have to copy the node iff the type will be
1293 remapped. copy_tree_r will not copy a constant. */
1294 else if (CONSTANT_CLASS_P (*tp))
1296 tree new_type = remap_type (TREE_TYPE (*tp), id);
1298 if (new_type == TREE_TYPE (*tp))
1299 *walk_subtrees = 0;
1301 else if (TREE_CODE (*tp) == INTEGER_CST)
1302 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1303 else
1305 *tp = copy_node (*tp);
1306 TREE_TYPE (*tp) = new_type;
1310 /* Otherwise, just copy the node. Note that copy_tree_r already
1311 knows not to copy VAR_DECLs, etc., so this is safe. */
1312 else
1314 /* Here we handle trees that are not completely rewritten.
1315 First we detect some inlining-induced bogosities for
1316 discarding. */
1317 if (TREE_CODE (*tp) == MODIFY_EXPR
1318 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1319 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1321 /* Some assignments VAR = VAR; don't generate any rtl code
1322 and thus don't count as variable modification. Avoid
1323 keeping bogosities like 0 = 0. */
1324 tree decl = TREE_OPERAND (*tp, 0), value;
1325 tree *n;
1327 n = id->decl_map->get (decl);
1328 if (n)
1330 value = *n;
1331 STRIP_TYPE_NOPS (value);
1332 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1334 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1335 return copy_tree_body_r (tp, walk_subtrees, data);
1339 else if (TREE_CODE (*tp) == INDIRECT_REF)
1341 /* Get rid of *& from inline substitutions that can happen when a
1342 pointer argument is an ADDR_EXPR. */
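/* Illustrative case (assumed): with a call 'foo (&a)' and the inlined body
   dereferencing the parameter, '*p' becomes '*&a' after substitution; the
   code below folds that back to plain 'a'.  */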
1343 tree decl = TREE_OPERAND (*tp, 0);
1344 tree *n = id->decl_map->get (decl);
1345 if (n)
1347 /* If we happen to get an ADDR_EXPR in n->value, strip
1348 it manually here as we'll eventually get ADDR_EXPRs
1349 which lie about their types pointed to. In this case
1350 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1351 but we absolutely rely on that. As fold_indirect_ref
1352 does other useful transformations, try that first, though. */
1353 tree type = TREE_TYPE (*tp);
1354 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1355 tree old = *tp;
1356 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1357 if (! *tp)
1359 type = remap_type (type, id);
1360 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1363 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1364 /* ??? We should either assert here or build
1365 a VIEW_CONVERT_EXPR instead of blindly leaking
1366 incompatible types to our IL. */
1367 if (! *tp)
1368 *tp = TREE_OPERAND (ptr, 0);
1370 else
1372 *tp = build1 (INDIRECT_REF, type, ptr);
1373 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1374 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1375 TREE_READONLY (*tp) = TREE_READONLY (old);
1376 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1377 have remapped a parameter as the property might be
1378 valid only for the parameter itself. */
1379 if (TREE_THIS_NOTRAP (old)
1380 && (!is_parm (TREE_OPERAND (old, 0))
1381 || (!id->transform_parameter && is_parm (ptr))))
1382 TREE_THIS_NOTRAP (*tp) = 1;
1385 *walk_subtrees = 0;
1386 return NULL;
1389 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1391 /* We need to re-canonicalize MEM_REFs from inline substitutions
1392 that can happen when a pointer argument is an ADDR_EXPR.
1393 Recurse here manually to allow that. */
1394 tree ptr = TREE_OPERAND (*tp, 0);
1395 tree type = remap_type (TREE_TYPE (*tp), id);
1396 tree old = *tp;
1397 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1398 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1399 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1400 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1401 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1402 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1404 MR_DEPENDENCE_CLIQUE (*tp)
1405 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1406 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1408 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1409 remapped a parameter as the property might be valid only
1410 for the parameter itself. */
1411 if (TREE_THIS_NOTRAP (old)
1412 && (!is_parm (TREE_OPERAND (old, 0))
1413 || (!id->transform_parameter && is_parm (ptr))))
1414 TREE_THIS_NOTRAP (*tp) = 1;
1415 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1416 *walk_subtrees = 0;
1417 return NULL;
1420 /* Here is the "usual case". Copy this tree node, and then
1421 tweak some special cases. */
1422 copy_tree_r (tp, walk_subtrees, NULL);
1424 /* If EXPR has a block defined, map it to the newly constructed block.
1425 When inlining we want EXPRs without a block to appear in the block
1426 of the function call if we are not remapping a type. */
1427 if (EXPR_P (*tp))
1429 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1430 if (TREE_BLOCK (*tp))
1432 tree *n;
1433 n = id->decl_map->get (TREE_BLOCK (*tp));
1434 if (n)
1435 new_block = *n;
1437 TREE_SET_BLOCK (*tp, new_block);
1440 if (TREE_CODE (*tp) != OMP_CLAUSE)
1441 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1443 /* The copied TARGET_EXPR has never been expanded, even if the
1444 original node was expanded already. */
1445 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1447 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1448 TREE_OPERAND (*tp, 3) = NULL_TREE;
1451 /* Variable substitution need not be simple. In particular, the
1452 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1453 and friends are up-to-date. */
1454 else if (TREE_CODE (*tp) == ADDR_EXPR)
1456 int invariant = is_gimple_min_invariant (*tp);
1457 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1459 /* Handle the case where we substituted an INDIRECT_REF
1460 into the operand of the ADDR_EXPR. */
1461 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1462 && !id->do_not_fold)
1464 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1465 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1466 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1467 *tp = t;
1469 else
1470 recompute_tree_invariant_for_addr_expr (*tp);
1472 /* If this used to be invariant, but is not any longer,
1473 then regimplification is probably needed. */
1474 if (invariant && !is_gimple_min_invariant (*tp))
1475 id->regimplify = true;
1477 *walk_subtrees = 0;
1481 /* Keep iterating. */
1482 return NULL_TREE;
1485 /* Helper for remap_gimple_stmt. Given an EH region number for the
1486 source function, map that to the duplicate EH region number in
1487 the destination function. */
1489 static int
1490 remap_eh_region_nr (int old_nr, copy_body_data *id)
1492 eh_region old_r, new_r;
1494 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1495 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1497 return new_r->index;
1500 /* Similar, but operate on INTEGER_CSTs. */
1502 static tree
1503 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1505 int old_nr, new_nr;
1507 old_nr = tree_to_shwi (old_t_nr);
1508 new_nr = remap_eh_region_nr (old_nr, id);
1510 return build_int_cst (integer_type_node, new_nr);
1513 /* Helper for copy_bb. Remap statement STMT using the inlining
1514 information in ID. Return the new statement copy. */
1516 static gimple_seq
1517 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1519 gimple *copy = NULL;
1520 struct walk_stmt_info wi;
1521 bool skip_first = false;
1522 gimple_seq stmts = NULL;
1524 if (is_gimple_debug (stmt)
1525 && (gimple_debug_nonbind_marker_p (stmt)
1526 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1527 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1528 return NULL;
1530 /* Begin by recognizing trees that we'll completely rewrite for the
1531 inlining context. Our output for these trees is completely
1532 different from our input (e.g. RETURN_EXPR is deleted and morphs
1533 into an edge). Further down, we'll handle trees that get
1534 duplicated and/or tweaked. */
1536 /* When requested, GIMPLE_RETURN should be transformed to just the
1537 contained GIMPLE_ASSIGN. The branch semantics of the return will
1538 be handled elsewhere by manipulating the CFG rather than the
1539 statement. */
1540 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1542 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1544 /* If we're returning something, just turn that into an
1545 assignment to the equivalent of the original RESULT_DECL.
1546 If RETVAL is just the result decl, the result decl has
1547 already been set (e.g. a recent "foo (&result_decl, ...)");
1548 just toss the entire GIMPLE_RETURN. Likewise for when the
1549 call doesn't want the return value. */
1550 if (retval
1551 && (TREE_CODE (retval) != RESULT_DECL
1552 && (!id->call_stmt
1553 || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1554 && (TREE_CODE (retval) != SSA_NAME
1555 || ! SSA_NAME_VAR (retval)
1556 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1558 copy = gimple_build_assign (id->do_not_unshare
1559 ? id->retvar : unshare_expr (id->retvar),
1560 retval);
1561 /* id->retvar is already substituted. Skip it on later remapping. */
1562 skip_first = true;
1564 else
1565 return NULL;
1567 else if (gimple_has_substatements (stmt))
1569 gimple_seq s1, s2;
1571 /* When cloning bodies from the C++ front end, we will be handed bodies
1572 in High GIMPLE form. Handle here all the High GIMPLE statements that
1573 have embedded statements. */
1574 switch (gimple_code (stmt))
1576 case GIMPLE_BIND:
1577 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1578 break;
1580 case GIMPLE_CATCH:
1582 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1583 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1584 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1586 break;
1588 case GIMPLE_EH_FILTER:
1589 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1590 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1591 break;
1593 case GIMPLE_TRY:
1594 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1595 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1596 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1597 break;
1599 case GIMPLE_WITH_CLEANUP_EXPR:
1600 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1601 copy = gimple_build_wce (s1);
1602 break;
1604 case GIMPLE_OMP_PARALLEL:
1606 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1607 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1608 copy = gimple_build_omp_parallel
1609 (s1,
1610 gimple_omp_parallel_clauses (omp_par_stmt),
1611 gimple_omp_parallel_child_fn (omp_par_stmt),
1612 gimple_omp_parallel_data_arg (omp_par_stmt));
1614 break;
1616 case GIMPLE_OMP_TASK:
1617 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1618 copy = gimple_build_omp_task
1619 (s1,
1620 gimple_omp_task_clauses (stmt),
1621 gimple_omp_task_child_fn (stmt),
1622 gimple_omp_task_data_arg (stmt),
1623 gimple_omp_task_copy_fn (stmt),
1624 gimple_omp_task_arg_size (stmt),
1625 gimple_omp_task_arg_align (stmt));
1626 break;
1628 case GIMPLE_OMP_FOR:
1629 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1630 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1631 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1632 gimple_omp_for_clauses (stmt),
1633 gimple_omp_for_collapse (stmt), s2);
1635 size_t i;
1636 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1638 gimple_omp_for_set_index (copy, i,
1639 gimple_omp_for_index (stmt, i));
1640 gimple_omp_for_set_initial (copy, i,
1641 gimple_omp_for_initial (stmt, i));
1642 gimple_omp_for_set_final (copy, i,
1643 gimple_omp_for_final (stmt, i));
1644 gimple_omp_for_set_incr (copy, i,
1645 gimple_omp_for_incr (stmt, i));
1646 gimple_omp_for_set_cond (copy, i,
1647 gimple_omp_for_cond (stmt, i));
1650 break;
1652 case GIMPLE_OMP_MASTER:
1653 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1654 copy = gimple_build_omp_master (s1);
1655 break;
1657 case GIMPLE_OMP_TASKGROUP:
1658 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1659 copy = gimple_build_omp_taskgroup
1660 (s1, gimple_omp_taskgroup_clauses (stmt));
1661 break;
1663 case GIMPLE_OMP_ORDERED:
1664 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1665 copy = gimple_build_omp_ordered
1666 (s1,
1667 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1668 break;
1670 case GIMPLE_OMP_SCAN:
1671 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1672 copy = gimple_build_omp_scan
1673 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1674 break;
1676 case GIMPLE_OMP_SECTION:
1677 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1678 copy = gimple_build_omp_section (s1);
1679 break;
1681 case GIMPLE_OMP_SECTIONS:
1682 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1683 copy = gimple_build_omp_sections
1684 (s1, gimple_omp_sections_clauses (stmt));
1685 break;
1687 case GIMPLE_OMP_SINGLE:
1688 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1689 copy = gimple_build_omp_single
1690 (s1, gimple_omp_single_clauses (stmt));
1691 break;
1693 case GIMPLE_OMP_TARGET:
1694 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1695 copy = gimple_build_omp_target
1696 (s1, gimple_omp_target_kind (stmt),
1697 gimple_omp_target_clauses (stmt));
1698 break;
1700 case GIMPLE_OMP_TEAMS:
1701 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1702 copy = gimple_build_omp_teams
1703 (s1, gimple_omp_teams_clauses (stmt));
1704 break;
1706 case GIMPLE_OMP_CRITICAL:
1707 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1708 copy = gimple_build_omp_critical (s1,
1709 gimple_omp_critical_name
1710 (as_a <gomp_critical *> (stmt)),
1711 gimple_omp_critical_clauses
1712 (as_a <gomp_critical *> (stmt)));
1713 break;
1715 case GIMPLE_TRANSACTION:
1717 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1718 gtransaction *new_trans_stmt;
1719 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1720 id);
1721 copy = new_trans_stmt = gimple_build_transaction (s1);
1722 gimple_transaction_set_subcode (new_trans_stmt,
1723 gimple_transaction_subcode (old_trans_stmt));
1724 gimple_transaction_set_label_norm (new_trans_stmt,
1725 gimple_transaction_label_norm (old_trans_stmt));
1726 gimple_transaction_set_label_uninst (new_trans_stmt,
1727 gimple_transaction_label_uninst (old_trans_stmt));
1728 gimple_transaction_set_label_over (new_trans_stmt,
1729 gimple_transaction_label_over (old_trans_stmt));
1731 break;
1733 default:
1734 gcc_unreachable ();
1737 else
1739 if (gimple_assign_copy_p (stmt)
1740 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1741 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1743 /* Here we handle statements that are not completely rewritten.
1744 First we detect some inlining-induced bogosities for
1745 discarding. */
1747 /* Some assignments VAR = VAR; don't generate any rtl code
1748 and thus don't count as variable modification. Avoid
1749 keeping bogosities like 0 = 0. */
1750 tree decl = gimple_assign_lhs (stmt), value;
1751 tree *n;
1753 n = id->decl_map->get (decl);
1754 if (n)
1756 value = *n;
1757 STRIP_TYPE_NOPS (value);
1758 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1759 return NULL;
1763 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1764 in a block that we aren't copying during tree_function_versioning,
1765 just drop the clobber stmt. */
1766 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1768 tree lhs = gimple_assign_lhs (stmt);
1769 if (TREE_CODE (lhs) == MEM_REF
1770 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1772 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1773 if (gimple_bb (def_stmt)
1774 && !bitmap_bit_p (id->blocks_to_copy,
1775 gimple_bb (def_stmt)->index))
1776 return NULL;
1780 /* We do not allow CLOBBERs of handled components. In case the
1781 returned value is stored via such a handled component, remove
1782 the clobber so the stmt verifier is happy. */
1783 if (gimple_clobber_p (stmt)
1784 && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1786 tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1787 if (!DECL_P (remapped)
1788 && TREE_CODE (remapped) != MEM_REF)
1789 return NULL;
1792 if (gimple_debug_bind_p (stmt))
1794 gdebug *copy
1795 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1796 gimple_debug_bind_get_value (stmt),
1797 stmt);
1798 if (id->reset_location)
1799 gimple_set_location (copy, input_location);
1800 id->debug_stmts.safe_push (copy);
1801 gimple_seq_add_stmt (&stmts, copy);
1802 return stmts;
1804 if (gimple_debug_source_bind_p (stmt))
1806 gdebug *copy = gimple_build_debug_source_bind
1807 (gimple_debug_source_bind_get_var (stmt),
1808 gimple_debug_source_bind_get_value (stmt),
1809 stmt);
1810 if (id->reset_location)
1811 gimple_set_location (copy, input_location);
1812 id->debug_stmts.safe_push (copy);
1813 gimple_seq_add_stmt (&stmts, copy);
1814 return stmts;
1816 if (gimple_debug_nonbind_marker_p (stmt))
1818 /* If the inlined function has too many debug markers,
1819 don't copy them. */
1820 if (id->src_cfun->debug_marker_count
1821 > param_max_debug_marker_count)
1822 return stmts;
1824 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1825 if (id->reset_location)
1826 gimple_set_location (copy, input_location);
1827 id->debug_stmts.safe_push (copy);
1828 gimple_seq_add_stmt (&stmts, copy);
1829 return stmts;
1832 /* Create a new deep copy of the statement. */
1833 copy = gimple_copy (stmt);
1835 /* Clear flags that need revisiting. */
1836 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1838 if (gimple_call_tail_p (call_stmt))
1839 gimple_call_set_tail (call_stmt, false);
1840 if (gimple_call_from_thunk_p (call_stmt))
1841 gimple_call_set_from_thunk (call_stmt, false);
1842 if (gimple_call_internal_p (call_stmt))
1843 switch (gimple_call_internal_fn (call_stmt))
1845 case IFN_GOMP_SIMD_LANE:
1846 case IFN_GOMP_SIMD_VF:
1847 case IFN_GOMP_SIMD_LAST_LANE:
1848 case IFN_GOMP_SIMD_ORDERED_START:
1849 case IFN_GOMP_SIMD_ORDERED_END:
1850 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1851 break;
1852 default:
1853 break;
1857 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1858 RESX and EH_DISPATCH. */
1859 if (id->eh_map)
1860 switch (gimple_code (copy))
1862 case GIMPLE_CALL:
1864 tree r, fndecl = gimple_call_fndecl (copy);
1865 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1866 switch (DECL_FUNCTION_CODE (fndecl))
1868 case BUILT_IN_EH_COPY_VALUES:
1869 r = gimple_call_arg (copy, 1);
1870 r = remap_eh_region_tree_nr (r, id);
1871 gimple_call_set_arg (copy, 1, r);
1872 /* FALLTHRU */
1874 case BUILT_IN_EH_POINTER:
1875 case BUILT_IN_EH_FILTER:
1876 r = gimple_call_arg (copy, 0);
1877 r = remap_eh_region_tree_nr (r, id);
1878 gimple_call_set_arg (copy, 0, r);
1879 break;
1881 default:
1882 break;
1885 /* Reset alias info if we didn't apply measures to
1886 keep it valid over inlining by setting DECL_PT_UID. */
1887 if (!id->src_cfun->gimple_df
1888 || !id->src_cfun->gimple_df->ipa_pta)
1889 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1891 break;
1893 case GIMPLE_RESX:
1895 gresx *resx_stmt = as_a <gresx *> (copy);
1896 int r = gimple_resx_region (resx_stmt);
1897 r = remap_eh_region_nr (r, id);
1898 gimple_resx_set_region (resx_stmt, r);
1900 break;
1902 case GIMPLE_EH_DISPATCH:
1904 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1905 int r = gimple_eh_dispatch_region (eh_dispatch);
1906 r = remap_eh_region_nr (r, id);
1907 gimple_eh_dispatch_set_region (eh_dispatch, r);
1909 break;
1911 default:
1912 break;
1916 /* If STMT has a block defined, map it to the newly constructed block. */
1917 if (tree block = gimple_block (copy))
1919 tree *n;
1920 n = id->decl_map->get (block);
1921 gcc_assert (n);
1922 gimple_set_block (copy, *n);
1924 if (id->param_body_adjs)
1926 gimple_seq extra_stmts = NULL;
1927 id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts);
1928 if (!gimple_seq_empty_p (extra_stmts))
1930 memset (&wi, 0, sizeof (wi));
1931 wi.info = id;
1932 for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1933 !gsi_end_p (egsi);
1934 gsi_next (&egsi))
1935 walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1936 gimple_seq_add_seq (&stmts, extra_stmts);
1940 if (id->reset_location)
1941 gimple_set_location (copy, input_location);
1943 /* Debug statements ought to be rebuilt and not copied. */
1944 gcc_checking_assert (!is_gimple_debug (copy));
1946 /* Remap all the operands in COPY. */
1947 memset (&wi, 0, sizeof (wi));
1948 wi.info = id;
1949 if (skip_first)
1950 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1951 else
1952 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1954 /* Clear the copied virtual operands. We are not remapping them here
1955 but are going to recreate them from scratch. */
1956 if (gimple_has_mem_ops (copy))
1958 gimple_set_vdef (copy, NULL_TREE);
1959 gimple_set_vuse (copy, NULL_TREE);
1962 if (cfun->can_throw_non_call_exceptions)
1964 /* When inlining a function which does not have non-call exceptions
1965 enabled into a function that has them (which only happens with
1966 always-inline) we have to fix up stmts that cannot throw. */
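/* For example, a condition like "if (*p_1 > x_2)" is rewritten below into
   "tmp_3 = *p_1 > x_2; if (tmp_3 != 0)", so the potentially trapping
   comparison becomes a separate statement that can throw (the SSA names
   here are purely illustrative).  */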
1967 if (gcond *cond = dyn_cast <gcond *> (copy))
1968 if (gimple_could_trap_p (cond))
1970 gassign *cmp
1971 = gimple_build_assign (make_ssa_name (boolean_type_node),
1972 gimple_cond_code (cond),
1973 gimple_cond_lhs (cond),
1974 gimple_cond_rhs (cond));
1975 gimple_seq_add_stmt (&stmts, cmp);
1976 gimple_cond_set_code (cond, NE_EXPR);
1977 gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
1978 gimple_cond_set_rhs (cond, boolean_false_node);
1982 gimple_seq_add_stmt (&stmts, copy);
1983 return stmts;
1987 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1988 later. */
1990 static basic_block
1991 copy_bb (copy_body_data *id, basic_block bb,
1992 profile_count num, profile_count den)
1994 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1995 basic_block copy_basic_block;
1996 tree decl;
1997 basic_block prev;
1999 profile_count::adjust_for_ipa_scaling (&num, &den);
2001 /* Search for previous copied basic block. */
2002 prev = bb->prev_bb;
2003 while (!prev->aux)
2004 prev = prev->prev_bb;
2006 /* create_basic_block() will append every new block to
2007 basic_block_info automatically. */
2008 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
2009 copy_basic_block->count = bb->count.apply_scale (num, den);
2011 copy_gsi = gsi_start_bb (copy_basic_block);
2013 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2015 gimple_seq stmts;
2016 gimple *stmt = gsi_stmt (gsi);
2017 gimple *orig_stmt = stmt;
2018 gimple_stmt_iterator stmts_gsi;
2019 bool stmt_added = false;
2021 id->regimplify = false;
2022 stmts = remap_gimple_stmt (stmt, id);
2024 if (gimple_seq_empty_p (stmts))
2025 continue;
2027 seq_gsi = copy_gsi;
2029 for (stmts_gsi = gsi_start (stmts);
2030 !gsi_end_p (stmts_gsi); )
2032 stmt = gsi_stmt (stmts_gsi);
2034 /* Advance iterator now before stmt is moved to seq_gsi. */
2035 gsi_next (&stmts_gsi);
2037 if (gimple_nop_p (stmt))
2038 continue;
2040 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2041 orig_stmt);
2043 /* With return slot optimization we can end up with
2044 non-gimple (foo *)&this->m, fix that here. */
2045 if (is_gimple_assign (stmt)
2046 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
2047 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2049 tree new_rhs;
2050 new_rhs = force_gimple_operand_gsi (&seq_gsi,
2051 gimple_assign_rhs1 (stmt),
2052 true, NULL, false,
2053 GSI_CONTINUE_LINKING);
2054 gimple_assign_set_rhs1 (stmt, new_rhs);
2055 id->regimplify = false;
2058 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2060 if (id->regimplify)
2061 gimple_regimplify_operands (stmt, &seq_gsi);
2063 stmt_added = true;
2066 if (!stmt_added)
2067 continue;
2069 /* If copy_basic_block has been empty at the start of this iteration,
2070 call gsi_start_bb again to get at the newly added statements. */
2071 if (gsi_end_p (copy_gsi))
2072 copy_gsi = gsi_start_bb (copy_basic_block);
2073 else
2074 gsi_next (&copy_gsi);
2076 /* Process the new statement. The call to gimple_regimplify_operands
2077 possibly turned the statement into multiple statements; we
2078 need to process all of them. */
2081 tree fn;
2082 gcall *call_stmt;
2084 stmt = gsi_stmt (copy_gsi);
2085 call_stmt = dyn_cast <gcall *> (stmt);
2086 if (call_stmt
2087 && gimple_call_va_arg_pack_p (call_stmt)
2088 && id->call_stmt
2089 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2091 /* __builtin_va_arg_pack () should be replaced by
2092 all arguments corresponding to ... in the caller. */
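/* Illustrative example: if the inlined function is declared
   "static inline void f (int x, ...)" and the caller's statement is
   "f (x0, 5, 6, 7)", then a call "g (x, __builtin_va_arg_pack ())"
   inside f is rewritten as "g (x, 5, 6, 7)".  */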
2093 tree p;
2094 gcall *new_call;
2095 vec<tree> argarray;
2096 size_t nargs = gimple_call_num_args (id->call_stmt);
2097 size_t n;
2099 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2100 nargs--;
2102 /* Create the new array of arguments. */
2103 n = nargs + gimple_call_num_args (call_stmt);
2104 argarray.create (n);
2105 argarray.safe_grow_cleared (n, true);
2107 /* Copy all the arguments before '...' */
2108 memcpy (argarray.address (),
2109 gimple_call_arg_ptr (call_stmt, 0),
2110 gimple_call_num_args (call_stmt) * sizeof (tree));
2112 /* Append the arguments passed in '...' */
2113 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2114 gimple_call_arg_ptr (id->call_stmt, 0)
2115 + (gimple_call_num_args (id->call_stmt) - nargs),
2116 nargs * sizeof (tree));
2118 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2119 argarray);
2121 argarray.release ();
2123 /* Copy all GIMPLE_CALL flags, location and block, except
2124 GF_CALL_VA_ARG_PACK. */
2125 gimple_call_copy_flags (new_call, call_stmt);
2126 gimple_call_set_va_arg_pack (new_call, false);
2127 /* location includes block. */
2128 gimple_set_location (new_call, gimple_location (stmt));
2129 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2131 gsi_replace (&copy_gsi, new_call, false);
2132 stmt = new_call;
2134 else if (call_stmt
2135 && id->call_stmt
2136 && (decl = gimple_call_fndecl (stmt))
2137 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2139 /* __builtin_va_arg_pack_len () should be replaced by
2140 the number of anonymous arguments. */
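/* Continuing the illustrative example above: for a caller statement
   "f (x0, 5, 6, 7)" where f has one named parameter,
   __builtin_va_arg_pack_len () evaluates to 3.  */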
2141 size_t nargs = gimple_call_num_args (id->call_stmt);
2142 tree count, p;
2143 gimple *new_stmt;
2145 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2146 nargs--;
2148 if (!gimple_call_lhs (stmt))
2150 /* Drop unused calls. */
2151 gsi_remove (&copy_gsi, false);
2152 continue;
2154 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2156 count = build_int_cst (integer_type_node, nargs);
2157 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2158 gsi_replace (&copy_gsi, new_stmt, false);
2159 stmt = new_stmt;
2161 else if (nargs != 0)
2163 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2164 count = build_int_cst (integer_type_node, nargs);
2165 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2166 PLUS_EXPR, newlhs, count);
2167 gimple_call_set_lhs (stmt, newlhs);
2168 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2171 else if (call_stmt
2172 && id->call_stmt
2173 && gimple_call_internal_p (stmt)
2174 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2176 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2177 gsi_remove (&copy_gsi, false);
2178 continue;
2181 /* Statements produced by inlining can be unfolded, especially
2182 when we constant propagated some operands. We can't fold
2183 them right now for two reasons:
2184 1) folding requires SSA_NAME_DEF_STMTs to be correct
2185 2) we can't change function calls to builtins.
2186 So we just mark the statement for later folding. We mark
2187 all new statements, instead of just the statements that have changed
2188 by some nontrivial substitution, so even statements made
2189 foldable indirectly are updated. If this turns out to be
2190 expensive, copy_body can be told to watch for nontrivial
2191 changes. */
2192 if (id->statements_to_fold)
2193 id->statements_to_fold->add (stmt);
2195 /* We're duplicating a CALL_EXPR. Find any corresponding
2196 callgraph edges and update or duplicate them. */
2197 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2199 struct cgraph_edge *edge;
2201 switch (id->transform_call_graph_edges)
2203 case CB_CGE_DUPLICATE:
2204 edge = id->src_node->get_edge (orig_stmt);
2205 if (edge)
2207 struct cgraph_edge *old_edge = edge;
2209 /* A speculative call consists of multiple
2210 edges - an indirect edge and one or more direct edges.
2211 Duplicate the whole thing and distribute frequencies
2212 accordingly. */
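/* E.g. with one direct target whose edge count is 90 and an indirect
   remainder of 10, the cloned direct edge receives 90% and the cloned
   indirect edge 10% of the copied block's count (illustrative numbers).  */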
2213 if (edge->speculative)
2215 int n = 0;
2216 profile_count direct_cnt
2217 = profile_count::zero ();
2219 /* First figure out the distribution of counts
2220 so we can re-scale BB profile accordingly. */
2221 for (cgraph_edge *e = old_edge; e;
2222 e = e->next_speculative_call_target ())
2223 direct_cnt = direct_cnt + e->count;
2225 cgraph_edge *indirect
2226 = old_edge->speculative_call_indirect_edge ();
2227 profile_count indir_cnt = indirect->count;
2229 /* Next iterate over all direct edges, clone each one and its
2230 corresponding reference and update the profile. */
2231 for (cgraph_edge *e = old_edge; e;
2233 e = e->next_speculative_call_target ())
2235 profile_count cnt = e->count;
2237 id->dst_node->clone_reference
2238 (e->speculative_call_target_ref (), stmt);
2239 edge = e->clone (id->dst_node, call_stmt,
2240 gimple_uid (stmt), num, den,
2241 true);
2242 profile_probability prob
2243 = cnt.probability_in (direct_cnt
2244 + indir_cnt);
2245 edge->count
2246 = copy_basic_block->count.apply_probability
2247 (prob);
2248 n++;
2250 gcc_checking_assert
2251 (indirect->num_speculative_call_targets_p ()
2252 == n);
2254 /* Duplicate the indirect edge after all direct edges have
2255 been cloned. */
2256 indirect = indirect->clone (id->dst_node, call_stmt,
2257 gimple_uid (stmt),
2258 num, den,
2259 true);
2261 profile_probability prob
2262 = indir_cnt.probability_in (direct_cnt
2263 + indir_cnt);
2264 indirect->count
2265 = copy_basic_block->count.apply_probability (prob);
2267 else
2269 edge = edge->clone (id->dst_node, call_stmt,
2270 gimple_uid (stmt),
2271 num, den,
2272 true);
2273 edge->count = copy_basic_block->count;
2276 break;
2278 case CB_CGE_MOVE_CLONES:
2279 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2280 call_stmt);
2281 edge = id->dst_node->get_edge (stmt);
2282 break;
2284 case CB_CGE_MOVE:
2285 edge = id->dst_node->get_edge (orig_stmt);
2286 if (edge)
2287 edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2288 break;
2290 default:
2291 gcc_unreachable ();
2294 /* Constant propagation on arguments done during inlining
2295 may create a new direct call. Produce an edge for it. */
2296 if ((!edge
2297 || (edge->indirect_inlining_edge
2298 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2299 && id->dst_node->definition
2300 && (fn = gimple_call_fndecl (stmt)) != NULL)
2302 struct cgraph_node *dest = cgraph_node::get_create (fn);
2304 /* We have a missing edge in the callgraph. This can happen
2305 when previous inlining turned an indirect call into a
2306 direct call by constant propagating arguments or when we are
2307 producing a dead clone (for further cloning). In all
2308 other cases we hit a bug (incorrect node sharing is the
2309 most common reason for missing edges). */
2310 gcc_assert (!dest->definition
2311 || dest->address_taken
2312 || !id->src_node->definition
2313 || !id->dst_node->definition);
2314 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2315 id->dst_node->create_edge_including_clones
2316 (dest, orig_stmt, call_stmt, bb->count,
2317 CIF_ORIGINALLY_INDIRECT_CALL);
2318 else
2319 id->dst_node->create_edge (dest, call_stmt,
2320 bb->count)->inline_failed
2321 = CIF_ORIGINALLY_INDIRECT_CALL;
2322 if (dump_file)
2324 fprintf (dump_file, "Created new direct edge to %s\n",
2325 dest->dump_name ());
2329 notice_special_calls (as_a <gcall *> (stmt));
2332 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2333 id->eh_map, id->eh_lp_nr);
2335 gsi_next (&copy_gsi);
2337 while (!gsi_end_p (copy_gsi));
2339 copy_gsi = gsi_last_bb (copy_basic_block);
2342 return copy_basic_block;
2345 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2346 form is quite easy, since the dominator relationship for the old basic blocks
2347 does not change.
2349 There is, however, an exception where inlining might change the dominator
2350 relation across EH edges from basic blocks within the inlined function
2351 leading to landing pads in the function we inline into.
2353 The function fills in PHI_RESULTs of such PHI nodes if they refer
2354 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2355 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2356 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2357 set, and this means that there will be no overlapping live ranges
2358 for the underlying symbol.
2360 This might change in the future if we allow redirecting of EH edges and
2361 we might then want to change the way we build the CFG pre-inlining to include
2362 all the possible edges. */
2363 static void
2364 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2365 bool can_throw, bool nonlocal_goto)
2367 edge e;
2368 edge_iterator ei;
2370 FOR_EACH_EDGE (e, ei, bb->succs)
2371 if (!e->dest->aux
2372 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2374 gphi *phi;
2375 gphi_iterator si;
2377 if (!nonlocal_goto)
2378 gcc_assert (e->flags & EDGE_EH);
2380 if (!can_throw)
2381 gcc_assert (!(e->flags & EDGE_EH));
2383 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2385 edge re;
2387 phi = si.phi ();
2389 /* For abnormal goto/call edges the receiver can be the
2390 ENTRY_BLOCK. Do not assert this cannot happen. */
2392 gcc_assert ((e->flags & EDGE_EH)
2393 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2395 re = find_edge (ret_bb, e->dest);
2396 gcc_checking_assert (re);
2397 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2398 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2400 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2401 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2406 /* Insert clobbers for automatic variables of the inlined ID->src_fn
2407 function at the start of basic block ID->eh_landing_pad_dest. */
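/* A clobber is added only for variables that are live at the original
   counterpart of some EH predecessor of the landing pad; liveness is
   computed below via compute_live_vars.  */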
2409 static void
2410 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2412 tree var;
2413 basic_block bb = id->eh_landing_pad_dest;
2414 live_vars_map *vars = NULL;
2415 unsigned int cnt = 0;
2416 unsigned int i;
2417 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2418 if (VAR_P (var)
2419 && !DECL_HARD_REGISTER (var)
2420 && !TREE_THIS_VOLATILE (var)
2421 && !DECL_HAS_VALUE_EXPR_P (var)
2422 && !is_gimple_reg (var)
2423 && auto_var_in_fn_p (var, id->src_fn)
2424 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2426 tree *t = id->decl_map->get (var);
2427 if (!t)
2428 continue;
2429 tree new_var = *t;
2430 if (VAR_P (new_var)
2431 && !DECL_HARD_REGISTER (new_var)
2432 && !TREE_THIS_VOLATILE (new_var)
2433 && !DECL_HAS_VALUE_EXPR_P (new_var)
2434 && !is_gimple_reg (new_var)
2435 && auto_var_in_fn_p (new_var, id->dst_fn))
2437 if (vars == NULL)
2438 vars = new live_vars_map;
2439 vars->put (DECL_UID (var), cnt++);
2442 if (vars == NULL)
2443 return;
2445 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2446 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2447 if (VAR_P (var))
2449 edge e;
2450 edge_iterator ei;
2451 bool needed = false;
2452 unsigned int *v = vars->get (DECL_UID (var));
2453 if (v == NULL)
2454 continue;
2455 FOR_EACH_EDGE (e, ei, bb->preds)
2456 if ((e->flags & EDGE_EH) != 0
2457 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2459 basic_block src_bb = (basic_block) e->src->aux;
2461 if (bitmap_bit_p (&live[src_bb->index], *v))
2463 needed = true;
2464 break;
2467 if (needed)
2469 tree new_var = *id->decl_map->get (var);
2470 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2471 tree clobber = build_clobber (TREE_TYPE (new_var));
2472 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2473 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2476 destroy_live_vars (live);
2477 delete vars;
2480 /* Copy edges from BB into its copy constructed earlier, scale profile
2481 accordingly. Edges will be taken care of later. Assume aux
2482 pointers to point to the copies of each BB. Return true if any
2483 debug stmts are left after a statement that must end the basic block. */
2485 static bool
2486 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2487 basic_block ret_bb, basic_block abnormal_goto_dest,
2488 copy_body_data *id)
2490 basic_block new_bb = (basic_block) bb->aux;
2491 edge_iterator ei;
2492 edge old_edge;
2493 gimple_stmt_iterator si;
2494 bool need_debug_cleanup = false;
2496 /* Use the indices from the original blocks to create edges for the
2497 new ones. */
2498 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2499 if (!(old_edge->flags & EDGE_EH))
2501 edge new_edge;
2502 int flags = old_edge->flags;
2503 location_t locus = old_edge->goto_locus;
2505 /* Return edges do get a FALLTHRU flag when they get inlined. */
2506 if (old_edge->dest->index == EXIT_BLOCK
2507 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2508 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2509 flags |= EDGE_FALLTHRU;
2511 new_edge
2512 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2513 new_edge->probability = old_edge->probability;
2514 if (!id->reset_location)
2515 new_edge->goto_locus = remap_location (locus, id);
2518 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2519 return false;
2521 /* When doing function splitting, we must decrease the count of the return
2522 block which was previously reachable from blocks we did not copy. */
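/* E.g. if the return block's count was 100 and an uncopied predecessor
   contributed 40 of it, only the remaining 60 (scaled by NUM/DEN) stays
   with the copy (illustrative numbers).  */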
2523 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2524 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2525 if (old_edge->src->index != ENTRY_BLOCK
2526 && !old_edge->src->aux)
2527 new_bb->count -= old_edge->count ().apply_scale (num, den);
2529 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2531 gimple *copy_stmt;
2532 bool can_throw, nonlocal_goto;
2534 copy_stmt = gsi_stmt (si);
2535 if (!is_gimple_debug (copy_stmt))
2536 update_stmt (copy_stmt);
2538 /* Do this before the possible split_block. */
2539 gsi_next (&si);
2541 /* If this tree could throw an exception, there are two
2542 cases where we need to add abnormal edge(s): the
2543 tree wasn't in a region and there is a "current
2544 region" in the caller; or the original tree had
2545 EH edges. In both cases split the block after the tree,
2546 and add abnormal edge(s) as needed; we need both
2547 those from the callee and the caller.
2548 We check whether the copy can throw, because the const
2549 propagation can change an INDIRECT_REF which throws
2550 into a COMPONENT_REF which doesn't. If the copy
2551 can throw, the original could also throw. */
2552 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2553 nonlocal_goto
2554 = (stmt_can_make_abnormal_goto (copy_stmt)
2555 && !computed_goto_p (copy_stmt));
2557 if (can_throw || nonlocal_goto)
2559 if (!gsi_end_p (si))
2561 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2562 gsi_next (&si);
2563 if (gsi_end_p (si))
2564 need_debug_cleanup = true;
2566 if (!gsi_end_p (si))
2567 /* Note that bb's predecessor edges aren't necessarily
2568 right at this point; split_block doesn't care. */
2570 edge e = split_block (new_bb, copy_stmt);
2572 new_bb = e->dest;
2573 new_bb->aux = e->src->aux;
2574 si = gsi_start_bb (new_bb);
2578 bool update_probs = false;
2580 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2582 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2583 update_probs = true;
2585 else if (can_throw)
2587 make_eh_edges (copy_stmt);
2588 update_probs = true;
2591 /* EH edges may not match old edges. Copy as much as possible. */
2592 if (update_probs)
2594 edge e;
2595 edge_iterator ei;
2596 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2598 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2599 if ((old_edge->flags & EDGE_EH)
2600 && (e = find_edge (copy_stmt_bb,
2601 (basic_block) old_edge->dest->aux))
2602 && (e->flags & EDGE_EH))
2603 e->probability = old_edge->probability;
2605 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2606 if (e->flags & EDGE_EH)
2608 if (!e->probability.initialized_p ())
2609 e->probability = profile_probability::never ();
2610 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2612 if (id->eh_landing_pad_dest == NULL)
2613 id->eh_landing_pad_dest = e->dest;
2614 else
2615 gcc_assert (id->eh_landing_pad_dest == e->dest);
2621 /* If the call we inline cannot make an abnormal goto, do not add
2622 additional abnormal edges but only retain those already present
2623 in the original function body. */
2624 if (abnormal_goto_dest == NULL)
2625 nonlocal_goto = false;
2626 if (nonlocal_goto)
2628 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2630 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2631 nonlocal_goto = false;
2632 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2633 in OpenMP regions which aren't allowed to be left abnormally.
2634 So, no need to add abnormal edge in that case. */
2635 else if (is_gimple_call (copy_stmt)
2636 && gimple_call_internal_p (copy_stmt)
2637 && (gimple_call_internal_fn (copy_stmt)
2638 == IFN_ABNORMAL_DISPATCHER)
2639 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2640 nonlocal_goto = false;
2641 else
2642 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2643 EDGE_ABNORMAL);
2646 if ((can_throw || nonlocal_goto)
2647 && gimple_in_ssa_p (cfun))
2648 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2649 can_throw, nonlocal_goto);
2651 return need_debug_cleanup;
2654 /* Copy the PHIs. All blocks and edges are copied, some blocks
2655 were possibly split and new outgoing EH edges inserted.
2656 BB points to the block of the original function and AUX pointers link
2657 the original and newly copied blocks. */
2659 static void
2660 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2662 basic_block const new_bb = (basic_block) bb->aux;
2663 edge_iterator ei;
2664 gphi *phi;
2665 gphi_iterator si;
2666 edge new_edge;
2667 bool inserted = false;
2669 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2671 tree res, new_res;
2672 gphi *new_phi;
2674 phi = si.phi ();
2675 res = PHI_RESULT (phi);
2676 new_res = res;
2677 if (!virtual_operand_p (res))
2679 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2680 if (EDGE_COUNT (new_bb->preds) == 0)
2682 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2683 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2685 else
2687 new_phi = create_phi_node (new_res, new_bb);
2688 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2690 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2691 bb);
2692 tree arg;
2693 tree new_arg;
2694 edge_iterator ei2;
2695 location_t locus;
2697 /* When doing partial cloning, we allow PHIs on the entry
2698 block as long as all the arguments are the same.
2699 Find any input edge to see the argument to copy. */
2700 if (!old_edge)
2701 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2702 if (!old_edge->src->aux)
2703 break;
2705 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2706 new_arg = arg;
2707 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2708 gcc_assert (new_arg);
2709 /* With return slot optimization we can end up with
2710 non-gimple (foo *)&this->m, fix that here. */
2711 if (TREE_CODE (new_arg) != SSA_NAME
2712 && TREE_CODE (new_arg) != FUNCTION_DECL
2713 && !is_gimple_val (new_arg))
2715 gimple_seq stmts = NULL;
2716 new_arg = force_gimple_operand (new_arg, &stmts, true,
2717 NULL);
2718 gsi_insert_seq_on_edge (new_edge, stmts);
2719 inserted = true;
2721 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2722 if (id->reset_location)
2723 locus = input_location;
2724 else
2725 locus = remap_location (locus, id);
2726 add_phi_arg (new_phi, new_arg, new_edge, locus);
2732 /* Commit the delayed edge insertions. */
2733 if (inserted)
2734 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2735 gsi_commit_one_edge_insert (new_edge, NULL);
2739 /* Wrapper for remap_decl so it can be used as a callback. */
2741 static tree
2742 remap_decl_1 (tree decl, void *data)
2744 return remap_decl (decl, (copy_body_data *) data);
2747 /* Build the struct function and associated data structures for the new clone
2748 NEW_FNDECL to be built. CALLEE_FNDECL is the original. This function changes
2749 cfun to the function of new_fndecl (and current_function_decl too). */
2751 static void
2752 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2754 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2756 if (!DECL_ARGUMENTS (new_fndecl))
2757 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2758 if (!DECL_RESULT (new_fndecl))
2759 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2761 /* Register specific tree functions. */
2762 gimple_register_cfg_hooks ();
2764 /* Get clean struct function. */
2765 push_struct_function (new_fndecl);
2767 /* We will rebuild these, so just sanity check that they are empty. */
2768 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2769 gcc_assert (cfun->local_decls == NULL);
2770 gcc_assert (cfun->cfg == NULL);
2771 gcc_assert (cfun->decl == new_fndecl);
2773 /* Copy items we preserve during cloning. */
2774 cfun->static_chain_decl = src_cfun->static_chain_decl;
2775 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2776 cfun->function_end_locus = src_cfun->function_end_locus;
2777 cfun->curr_properties = src_cfun->curr_properties;
2778 cfun->last_verified = src_cfun->last_verified;
2779 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2780 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2781 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2782 cfun->calls_eh_return = src_cfun->calls_eh_return;
2783 cfun->stdarg = src_cfun->stdarg;
2784 cfun->after_inlining = src_cfun->after_inlining;
2785 cfun->can_throw_non_call_exceptions
2786 = src_cfun->can_throw_non_call_exceptions;
2787 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2788 cfun->returns_struct = src_cfun->returns_struct;
2789 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2791 init_empty_tree_cfg ();
2793 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2795 profile_count num = count;
2796 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2797 profile_count::adjust_for_ipa_scaling (&num, &den);
2799 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2800 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2801 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2802 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2803 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2804 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
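/* E.g. if the source function's entry count is 1000 and COUNT is 250, the
   entry and exit counts of the new function are scaled down to a quarter of
   the source's values (illustrative numbers).  */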
2805 if (src_cfun->eh)
2806 init_eh_for_function ();
2808 if (src_cfun->gimple_df)
2810 init_tree_ssa (cfun);
2811 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2812 if (cfun->gimple_df->in_ssa_p)
2813 init_ssa_operands (cfun);
2817 /* Helper function for copy_cfg_body. Move debug stmts from the end
2818 of NEW_BB to the beginning of successor basic blocks when needed. If the
2819 successor has multiple predecessors, reset them; otherwise keep
2820 their value. */
2822 static void
2823 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2825 edge e;
2826 edge_iterator ei;
2827 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2829 if (gsi_end_p (si)
2830 || gsi_one_before_end_p (si)
2831 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2832 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2833 return;
2835 FOR_EACH_EDGE (e, ei, new_bb->succs)
2837 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2838 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2839 while (is_gimple_debug (gsi_stmt (ssi)))
2841 gimple *stmt = gsi_stmt (ssi);
2842 gdebug *new_stmt;
2843 tree var;
2844 tree value;
2846 /* For the last edge move the debug stmts instead of copying
2847 them. */
2848 if (ei_one_before_end_p (ei))
2850 si = ssi;
2851 gsi_prev (&ssi);
2852 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2854 gimple_debug_bind_reset_value (stmt);
2855 gimple_set_location (stmt, UNKNOWN_LOCATION);
2857 gsi_remove (&si, false);
2858 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2859 continue;
2862 if (gimple_debug_bind_p (stmt))
2864 var = gimple_debug_bind_get_var (stmt);
2865 if (single_pred_p (e->dest))
2867 value = gimple_debug_bind_get_value (stmt);
2868 value = unshare_expr (value);
2869 new_stmt = gimple_build_debug_bind (var, value, stmt);
2871 else
2872 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2874 else if (gimple_debug_source_bind_p (stmt))
2876 var = gimple_debug_source_bind_get_var (stmt);
2877 value = gimple_debug_source_bind_get_value (stmt);
2878 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2880 else if (gimple_debug_nonbind_marker_p (stmt))
2881 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2882 else
2883 gcc_unreachable ();
2884 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2885 id->debug_stmts.safe_push (new_stmt);
2886 gsi_prev (&ssi);
2891 /* Make a copy of the sub-loops of SRC_PARENT and place them
2892 as sub-loops of DEST_PARENT. */
2894 static void
2895 copy_loops (copy_body_data *id,
2896 class loop *dest_parent, class loop *src_parent)
2898 class loop *src_loop = src_parent->inner;
2899 while (src_loop)
2901 if (!id->blocks_to_copy
2902 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2904 class loop *dest_loop = alloc_loop ();
2906 /* Assign the new loop its header and latch and associate
2907 those with the new loop. */
2908 dest_loop->header = (basic_block)src_loop->header->aux;
2909 dest_loop->header->loop_father = dest_loop;
2910 if (src_loop->latch != NULL)
2912 dest_loop->latch = (basic_block)src_loop->latch->aux;
2913 dest_loop->latch->loop_father = dest_loop;
2916 /* Copy loop meta-data. */
2917 copy_loop_info (src_loop, dest_loop);
2918 if (dest_loop->unroll)
2919 cfun->has_unroll = true;
2920 if (dest_loop->force_vectorize)
2921 cfun->has_force_vectorize_loops = true;
2922 if (id->src_cfun->last_clique != 0)
2923 dest_loop->owned_clique
2924 = remap_dependence_clique (id,
2925 src_loop->owned_clique
2926 ? src_loop->owned_clique : 1);
2928 /* Finally place it into the loop array and the loop tree. */
2929 place_new_loop (cfun, dest_loop);
2930 flow_loop_tree_node_add (dest_parent, dest_loop);
2932 if (src_loop->simduid)
2934 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2935 cfun->has_simduid_loops = true;
2938 /* Recurse. */
2939 copy_loops (id, dest_loop, src_loop);
2941 src_loop = src_loop->next;
2945 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2947 void
2948 redirect_all_calls (copy_body_data * id, basic_block bb)
2950 gimple_stmt_iterator si;
2951 gimple *last = last_stmt (bb);
2952 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2954 gimple *stmt = gsi_stmt (si);
2955 if (is_gimple_call (stmt))
2957 tree old_lhs = gimple_call_lhs (stmt);
2958 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2959 if (edge)
2961 gimple *new_stmt
2962 = cgraph_edge::redirect_call_stmt_to_callee (edge);
2963 /* If the IPA-SRA transformation, run as part of edge redirection,
2964 removed the LHS because it is unused, save it to
2965 killed_new_ssa_names so that we can prune it from debug
2966 statements. */
2967 if (old_lhs
2968 && TREE_CODE (old_lhs) == SSA_NAME
2969 && !gimple_call_lhs (new_stmt))
2971 if (!id->killed_new_ssa_names)
2972 id->killed_new_ssa_names = new hash_set<tree> (16);
2973 id->killed_new_ssa_names->add (old_lhs);
2976 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2977 gimple_purge_dead_eh_edges (bb);
2983 /* Make a copy of the body of FN so that it can be inserted inline in
2984 another function. Walks FN via CFG, returns new fndecl. */
2986 static tree
2987 copy_cfg_body (copy_body_data * id,
2988 basic_block entry_block_map, basic_block exit_block_map,
2989 basic_block new_entry)
2991 tree callee_fndecl = id->src_fn;
2992 /* Original cfun for the callee, doesn't change. */
2993 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2994 struct function *cfun_to_copy;
2995 basic_block bb;
2996 tree new_fndecl = NULL;
2997 bool need_debug_cleanup = false;
2998 int last;
2999 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
3000 profile_count num = entry_block_map->count;
3002 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3004 /* Register specific tree functions. */
3005 gimple_register_cfg_hooks ();
3007 /* If we are inlining just a region of the function, make sure to connect
3008 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
3009 part of a loop, we must compute the frequency and probability of
3010 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
3011 probabilities of edges incoming from the nonduplicated region. */
3012 if (new_entry)
3014 edge e;
3015 edge_iterator ei;
3016 den = profile_count::zero ();
3018 FOR_EACH_EDGE (e, ei, new_entry->preds)
3019 if (!e->src->aux)
3020 den += e->count ();
3021 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
3024 profile_count::adjust_for_ipa_scaling (&num, &den);
3026 /* Must have a CFG here at this point. */
3027 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
3028 (DECL_STRUCT_FUNCTION (callee_fndecl)));
3031 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
3032 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3033 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3034 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3036 /* Duplicate any exception-handling regions. */
3037 if (cfun->eh)
3038 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3039 remap_decl_1, id);
3041 /* Use aux pointers to map the original blocks to their copies. */
3042 FOR_EACH_BB_FN (bb, cfun_to_copy)
3043 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3045 basic_block new_bb = copy_bb (id, bb, num, den);
3046 bb->aux = new_bb;
3047 new_bb->aux = bb;
3048 new_bb->loop_father = entry_block_map->loop_father;
3051 last = last_basic_block_for_fn (cfun);
3053 /* Now that we've duplicated the blocks, duplicate their edges. */
3054 basic_block abnormal_goto_dest = NULL;
3055 if (id->call_stmt
3056 && stmt_can_make_abnormal_goto (id->call_stmt))
3058 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3060 bb = gimple_bb (id->call_stmt);
3061 gsi_next (&gsi);
3062 if (gsi_end_p (gsi))
3063 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3065 FOR_ALL_BB_FN (bb, cfun_to_copy)
3066 if (!id->blocks_to_copy
3067 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3068 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3069 abnormal_goto_dest, id);
3071 if (id->eh_landing_pad_dest)
3073 add_clobbers_to_eh_landing_pad (id);
3074 id->eh_landing_pad_dest = NULL;
3077 if (new_entry)
3079 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3080 EDGE_FALLTHRU);
3081 e->probability = profile_probability::always ();
3084 /* Duplicate the loop tree, if available and wanted. */
3085 if (loops_for_fn (src_cfun) != NULL
3086 && current_loops != NULL)
3088 copy_loops (id, entry_block_map->loop_father,
3089 get_loop (src_cfun, 0));
3090 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
3091 loops_state_set (LOOPS_NEED_FIXUP);
3094 /* If the loop tree in the source function needed fixup, mark the
3095 destination loop tree for fixup, too. */
3096 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3097 loops_state_set (LOOPS_NEED_FIXUP);
3099 if (gimple_in_ssa_p (cfun))
3100 FOR_ALL_BB_FN (bb, cfun_to_copy)
3101 if (!id->blocks_to_copy
3102 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3103 copy_phis_for_bb (bb, id);
3105 FOR_ALL_BB_FN (bb, cfun_to_copy)
3106 if (bb->aux)
3108 if (need_debug_cleanup
3109 && bb->index != ENTRY_BLOCK
3110 && bb->index != EXIT_BLOCK)
3111 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3112 /* Update call edge destinations. This cannot be done before loop
3113 info is updated, because we may split basic blocks. */
3114 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3115 && bb->index != ENTRY_BLOCK
3116 && bb->index != EXIT_BLOCK)
3117 redirect_all_calls (id, (basic_block)bb->aux);
3118 ((basic_block)bb->aux)->aux = NULL;
3119 bb->aux = NULL;
3122 /* Zero out AUX fields of newly created blocks during EH edge
3123 insertion. */
3124 for (; last < last_basic_block_for_fn (cfun); last++)
3126 if (need_debug_cleanup)
3127 maybe_move_debug_stmts_to_successors (id,
3128 BASIC_BLOCK_FOR_FN (cfun, last));
3129 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3130 /* Update call edge destinations. This cannot be done before loop
3131 info is updated, because we may split basic blocks. */
3132 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3133 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3135 entry_block_map->aux = NULL;
3136 exit_block_map->aux = NULL;
3138 if (id->eh_map)
3140 delete id->eh_map;
3141 id->eh_map = NULL;
3143 if (id->dependence_map)
3145 delete id->dependence_map;
3146 id->dependence_map = NULL;
3149 return new_fndecl;
3152 /* Copy the debug STMT using ID. We deal with these statements in a
3153 special way: if any variable in their VALUE expression wasn't
3154 remapped yet, we won't remap it, because that would get decl uids
3155 out of sync, causing codegen differences between -g and -g0. If
3156 this arises, we drop the VALUE expression altogether. */
3158 static void
3159 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3161 tree t, *n;
3162 struct walk_stmt_info wi;
3164 if (tree block = gimple_block (stmt))
3166 n = id->decl_map->get (block);
3167 gimple_set_block (stmt, n ? *n : id->block);
3170 if (gimple_debug_nonbind_marker_p (stmt))
3171 return;
3173 /* Remap all the operands in COPY. */
3174 memset (&wi, 0, sizeof (wi));
3175 wi.info = id;
3177 processing_debug_stmt = 1;
3179 if (gimple_debug_source_bind_p (stmt))
3180 t = gimple_debug_source_bind_get_var (stmt);
3181 else if (gimple_debug_bind_p (stmt))
3182 t = gimple_debug_bind_get_var (stmt);
3183 else
3184 gcc_unreachable ();
3186 if (TREE_CODE (t) == PARM_DECL && id->debug_map
3187 && (n = id->debug_map->get (t)))
3189 gcc_assert (VAR_P (*n));
3190 t = *n;
3192 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3193 /* T is a non-localized variable. */;
3194 else
3195 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3197 if (gimple_debug_bind_p (stmt))
3199 gimple_debug_bind_set_var (stmt, t);
3201 if (gimple_debug_bind_has_value_p (stmt))
3202 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3203 remap_gimple_op_r, &wi, NULL);
3205 /* Punt if any decl couldn't be remapped. */
3206 if (processing_debug_stmt < 0)
3207 gimple_debug_bind_reset_value (stmt);
3209 else if (gimple_debug_source_bind_p (stmt))
3211 gimple_debug_source_bind_set_var (stmt, t);
3212 /* When inlining and the source bind refers to one of the optimized
3213 away parameters, change the source bind into a normal debug bind
3214 referring to the corresponding DEBUG_EXPR_DECL that should have
3215 been bound before the call stmt. */
3216 t = gimple_debug_source_bind_get_value (stmt);
3217 if (t != NULL_TREE
3218 && TREE_CODE (t) == PARM_DECL
3219 && id->call_stmt)
3221 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3222 unsigned int i;
3223 if (debug_args != NULL)
3225 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3226 if ((**debug_args)[i] == DECL_ORIGIN (t)
3227 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3229 t = (**debug_args)[i + 1];
3230 stmt->subcode = GIMPLE_DEBUG_BIND;
3231 gimple_debug_bind_set_value (stmt, t);
3232 break;
3236 if (gimple_debug_source_bind_p (stmt))
3237 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3238 remap_gimple_op_r, &wi, NULL);
3241 processing_debug_stmt = 0;
3243 update_stmt (stmt);
3246 /* Process deferred debug stmts. In order to give values better odds
3247 of being successfully remapped, we delay the processing of debug
3248 stmts until all other stmts that might require remapping are
3249 processed. */
3251 static void
3252 copy_debug_stmts (copy_body_data *id)
3254 size_t i;
3255 gdebug *stmt;
3257 if (!id->debug_stmts.exists ())
3258 return;
3260 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3261 copy_debug_stmt (stmt, id);
3263 id->debug_stmts.release ();
3266 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3267 another function. */
3269 static tree
3270 copy_tree_body (copy_body_data *id)
3272 tree fndecl = id->src_fn;
3273 tree body = DECL_SAVED_TREE (fndecl);
3275 walk_tree (&body, copy_tree_body_r, id, NULL);
3277 return body;
3280 /* Make a copy of the body of FN so that it can be inserted inline in
3281 another function. */
3283 static tree
3284 copy_body (copy_body_data *id,
3285 basic_block entry_block_map, basic_block exit_block_map,
3286 basic_block new_entry)
3288 tree fndecl = id->src_fn;
3289 tree body;
3291 /* If this body has a CFG, walk CFG and copy. */
3292 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3293 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3294 new_entry);
3295 copy_debug_stmts (id);
3296 delete id->killed_new_ssa_names;
3297 id->killed_new_ssa_names = NULL;
3299 return body;
3302 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3303 defined in function FN, or of a data member thereof. */
3305 static bool
3306 self_inlining_addr_expr (tree value, tree fn)
3308 tree var;
3310 if (TREE_CODE (value) != ADDR_EXPR)
3311 return false;
3313 var = get_base_address (TREE_OPERAND (value, 0));
3315 return var && auto_var_in_fn_p (var, fn);
3318 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3319 lexical block and line number information from base_stmt, if given,
3320 or from the last stmt of the block otherwise. */
3322 static gimple *
3323 insert_init_debug_bind (copy_body_data *id,
3324 basic_block bb, tree var, tree value,
3325 gimple *base_stmt)
3327 gimple *note;
3328 gimple_stmt_iterator gsi;
3329 tree tracked_var;
3331 if (!gimple_in_ssa_p (id->src_cfun))
3332 return NULL;
3334 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3335 return NULL;
3337 tracked_var = target_for_debug_bind (var);
3338 if (!tracked_var)
3339 return NULL;
3341 if (bb)
3343 gsi = gsi_last_bb (bb);
3344 if (!base_stmt && !gsi_end_p (gsi))
3345 base_stmt = gsi_stmt (gsi);
3348 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3350 if (bb)
3352 if (!gsi_end_p (gsi))
3353 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3354 else
3355 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3358 return note;
3361 static void
3362 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3364 /* If VAR represents a zero-sized variable, it's possible that the
3365 assignment statement may result in no gimple statements. */
3366 if (init_stmt)
3368 gimple_stmt_iterator si = gsi_last_bb (bb);
3370 /* We can end up with init statements that store to a non-register
3371 from a rhs with a conversion. Handle that here by forcing the
3372 rhs into a temporary. gimple_regimplify_operands is not
3373 prepared to do this for us. */
3374 if (!is_gimple_debug (init_stmt)
3375 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3376 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3377 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3379 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3380 gimple_expr_type (init_stmt),
3381 gimple_assign_rhs1 (init_stmt));
3382 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3383 GSI_NEW_STMT);
3384 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3385 gimple_assign_set_rhs1 (init_stmt, rhs);
3387 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3388 if (!is_gimple_debug (init_stmt))
3390 gimple_regimplify_operands (init_stmt, &si);
3392 tree def = gimple_assign_lhs (init_stmt);
3393 insert_init_debug_bind (id, bb, def, def, init_stmt);
3398 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3399 if need be (which should only be necessary for invalid programs). Attempt
3400 to convert VALUE to TYPE and return the result if it is possible; just return
3401 a zero constant of the given type if it fails. */
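/* For instance, an int argument passed for a long parameter is simply
   fold_convert-ed; a value whose type cannot be converted at all ends up as
   a VIEW_CONVERT_EXPR or as a zero constant of TYPE, as handled below.  */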
3403 tree
3404 force_value_to_type (tree type, tree value)
3406 /* If we can match up types by promotion/demotion do so. */
3407 if (fold_convertible_p (type, value))
3408 return fold_convert (type, value);
3410 /* ??? For valid programs we should not end up here.
3411 Still if we end up with truly mismatched types here, fall back
3412 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3413 GIMPLE to the following passes. */
3414 if (!is_gimple_reg_type (TREE_TYPE (value))
3415 || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3416 return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3417 else
3418 return build_zero_cst (type);
3421 /* Initialize parameter P with VALUE. If needed, produce an init statement
3422 at the end of BB. When BB is NULL, we return the init statement to be
3423 output later. */
3424 static gimple *
3425 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3426 basic_block bb, tree *vars)
3428 gimple *init_stmt = NULL;
3429 tree var;
3430 tree rhs = value;
3431 tree def = (gimple_in_ssa_p (cfun)
3432 ? ssa_default_def (id->src_cfun, p) : NULL);
3434 if (value
3435 && value != error_mark_node
3436 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3437 rhs = force_value_to_type (TREE_TYPE (p), value);
3439 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3440 here since the type of this decl must be visible to the calling
3441 function. */
3442 var = copy_decl_to_var (p, id);
3444 /* Declare this new variable. */
3445 DECL_CHAIN (var) = *vars;
3446 *vars = var;
3448 /* Make gimplifier happy about this variable. */
3449 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3451 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3452 we would not need to create a new variable here at all, if it
3453 weren't for debug info. Still, we can just use the argument
3454 value. */
3455 if (TREE_READONLY (p)
3456 && !TREE_ADDRESSABLE (p)
3457 && value && !TREE_SIDE_EFFECTS (value)
3458 && !def)
3460 /* We may produce non-gimple trees by adding NOPs or introduce
3461 invalid sharing when the operand is not really constant.
3462 It is not a big deal to prohibit constant propagation here as
3463 we will constant propagate in the DOM1 pass anyway. */
3464 if (is_gimple_min_invariant (value)
3465 && useless_type_conversion_p (TREE_TYPE (p),
3466 TREE_TYPE (value))
3467 /* We have to be very careful about ADDR_EXPR. Make sure
3468 the base variable isn't a local variable of the inlined
3469 function, e.g., when doing recursive inlining, direct or
3470 mutually-recursive or whatever, which is why we don't
3471 just test whether fn == current_function_decl. */
3472 && ! self_inlining_addr_expr (value, fn))
3474 insert_decl_map (id, p, value);
3475 insert_debug_decl_map (id, p, var);
3476 return insert_init_debug_bind (id, bb, var, value, NULL);
3480 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3481 that way, when the PARM_DECL is encountered, it will be
3482 automatically replaced by the VAR_DECL. */
3483 insert_decl_map (id, p, var);
3485 /* Even if P was TREE_READONLY, the new VAR should not be.
3486 In the original code, we would have constructed a
3487 temporary, and then the function body would have never
3488 changed the value of P. However, now, we will be
3489 constructing VAR directly. The constructor body may
3490 change its value multiple times as it is being
3491 constructed. Therefore, it must not be TREE_READONLY;
3492 the back-end assumes that a TREE_READONLY variable is
3493 assigned to only once. */
3494 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3495 TREE_READONLY (var) = 0;
3497 /* If there is no setup required and we are in SSA, take the easy route
3498 replacing all SSA names representing the function parameter by the
3499 SSA name passed to the function.
3501 We need to construct a map for the variable anyway as it might be used
3502 in different SSA names when the parameter is set in the function.
3504 Do the replacement at -O0 for const arguments replaced by a constant.
3505 This is important for builtin_constant_p and other constructs requiring
3506 a constant argument to be visible in the inlined function body. */
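/* E.g. for a call "foo (x_5)" the default definition of foo's parameter is
   simply mapped to x_5 and no initialization statement is emitted (the SSA
   name is illustrative).  */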
3507 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3508 && (optimize
3509 || (TREE_READONLY (p)
3510 && is_gimple_min_invariant (rhs)))
3511 && (TREE_CODE (rhs) == SSA_NAME
3512 || is_gimple_min_invariant (rhs))
3513 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3515 insert_decl_map (id, def, rhs);
3516 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3519 /* If the value of the argument is never used, don't bother initializing
3520 it. */
3521 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3523 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3524 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3527 /* Initialize this VAR_DECL from the equivalent argument. Convert
3528 the argument to the proper type in case it was promoted. */
3529 if (value)
3531 if (rhs == error_mark_node)
3533 insert_decl_map (id, p, var);
3534 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3537 STRIP_USELESS_TYPE_CONVERSION (rhs);
3539 /* If we are in SSA form, properly remap the default definition,
3540 or assign to a dummy SSA name if the parameter is unused and
3541 we are not optimizing. */
3542 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3544 if (def)
3546 def = remap_ssa_name (def, id);
3547 init_stmt = gimple_build_assign (def, rhs);
3548 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3549 set_ssa_default_def (cfun, var, NULL);
3551 else if (!optimize)
3553 def = make_ssa_name (var);
3554 init_stmt = gimple_build_assign (def, rhs);
3557 else
3558 init_stmt = gimple_build_assign (var, rhs);
3560 if (bb && init_stmt)
3561 insert_init_stmt (id, bb, init_stmt);
3563 return init_stmt;
3566 /* Generate code to initialize the parameters of the function at the
3567 top of the stack in ID from the GIMPLE_CALL STMT. */
3569 static void
3570 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3571 tree fn, basic_block bb)
3573 tree parms;
3574 size_t i;
3575 tree p;
3576 tree vars = NULL_TREE;
3577 tree static_chain = gimple_call_chain (stmt);
3579 /* Figure out what the parameters are. */
3580 parms = DECL_ARGUMENTS (fn);
3582 /* Loop through the parameter declarations, replacing each with an
3583 equivalent VAR_DECL, appropriately initialized. */
3584 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3586 tree val;
3587 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3588 setup_one_parameter (id, p, val, fn, bb, &vars);
3590 /* After remapping parameters remap their types. This has to be done
3591 in a second loop over all parameters to appropriately remap
3592 variable sized arrays when the size is specified in a
3593 parameter following the array. */
3594 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3596 tree *varp = id->decl_map->get (p);
3597 if (varp && VAR_P (*varp))
3599 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3600 ? ssa_default_def (id->src_cfun, p) : NULL);
3601 tree var = *varp;
3602 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3603 /* Also remap the default definition if it was remapped
3604 to the default definition of the parameter replacement
3605 by the parameter setup. */
3606 if (def)
3608 tree *defp = id->decl_map->get (def);
3609 if (defp
3610 && TREE_CODE (*defp) == SSA_NAME
3611 && SSA_NAME_VAR (*defp) == var)
3612 TREE_TYPE (*defp) = TREE_TYPE (var);
3617 /* Initialize the static chain. */
3618 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3619 gcc_assert (fn != current_function_decl);
3620 if (p)
3622 /* No static chain? Seems like a bug in tree-nested.c. */
3623 gcc_assert (static_chain);
3625 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3628 declare_inline_vars (id->block, vars);
3632 /* Declare a return variable to replace the RESULT_DECL for the
3633 function we are calling. An appropriate DECL_STMT is returned.
3634 The USE_STMT is filled to contain a use of the declaration to
3635 indicate the return value of the function.
3637 RETURN_SLOT, if non-null, is the place in which to store the result. It
3638 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3639 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3641 The return value is a (possibly null) value that holds the result
3642 as seen by the caller. */
3644 static tree
3645 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3646 basic_block entry_bb)
3648 tree callee = id->src_fn;
3649 tree result = DECL_RESULT (callee);
3650 tree callee_type = TREE_TYPE (result);
3651 tree caller_type;
3652 tree var, use;
3654 /* Handle type-mismatches in the function declaration return type
3655 vs. the call expression. */
3656 if (modify_dest)
3657 caller_type = TREE_TYPE (modify_dest);
3658 else if (return_slot)
3659 caller_type = TREE_TYPE (return_slot);
3660 else /* No LHS on the call. */
3661 caller_type = TREE_TYPE (TREE_TYPE (callee));
3663 /* We don't need to do anything for functions that don't return anything. */
3664 if (VOID_TYPE_P (callee_type))
3665 return NULL_TREE;
3667 /* If there was a return slot, then the return value is the
3668 dereferenced address of that object. */
3669 if (return_slot)
3671 /* The front end shouldn't have used both return_slot and
3672 a modify expression. */
3673 gcc_assert (!modify_dest);
3674 if (DECL_BY_REFERENCE (result))
3676 tree return_slot_addr = build_fold_addr_expr (return_slot);
3677 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3679 /* We are going to construct *&return_slot and we can't do that
3680 for variables believed to be not addressable.
3682 FIXME: This check can possibly match, because values returned
3683 via the return slot optimization are not believed to have their
3684 address taken by alias analysis. */
3685 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3686 var = return_slot_addr;
3687 mark_addressable (return_slot);
3689 else
3691 var = return_slot;
3692 gcc_assert (TREE_CODE (var) != SSA_NAME);
3693 if (TREE_ADDRESSABLE (result))
3694 mark_addressable (var);
3696 if (DECL_NOT_GIMPLE_REG_P (result)
3697 && DECL_P (var))
3698 DECL_NOT_GIMPLE_REG_P (var) = 1;
3700 if (!useless_type_conversion_p (callee_type, caller_type))
3701 var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3703 use = NULL;
3704 goto done;
3707 /* All types requiring non-trivial constructors should have been handled. */
3708 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3710 /* Attempt to avoid creating a new temporary variable. */
3711 if (modify_dest
3712 && TREE_CODE (modify_dest) != SSA_NAME)
3714 bool use_it = false;
3716 /* We can't use MODIFY_DEST if there's type promotion involved. */
3717 if (!useless_type_conversion_p (callee_type, caller_type))
3718 use_it = false;
3720 /* ??? If we're assigning to a variable sized type, then we must
3721 reuse the destination variable, because we've no good way to
3722 create variable sized temporaries at this point. */
3723 else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3724 use_it = true;
3726 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3727 reuse it as the result of the call directly. Don't do this if
3728 it would promote MODIFY_DEST to addressable. */
3729 else if (TREE_ADDRESSABLE (result))
3730 use_it = false;
3731 else
3733 tree base_m = get_base_address (modify_dest);
3735 /* If the base isn't a decl, then it's a pointer, and we don't
3736 know where that's going to go. */
3737 if (!DECL_P (base_m))
3738 use_it = false;
3739 else if (is_global_var (base_m))
3740 use_it = false;
3741 else if (DECL_NOT_GIMPLE_REG_P (result)
3742 && !DECL_NOT_GIMPLE_REG_P (base_m))
3743 use_it = false;
3744 else if (!TREE_ADDRESSABLE (base_m))
3745 use_it = true;
3748 if (use_it)
3750 var = modify_dest;
3751 use = NULL;
3752 goto done;
3756 gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3758 var = copy_result_decl_to_var (result, id);
3759 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3761 /* Do not have the rest of GCC warn about this variable as it should
3762 not be visible to the user. */
3763 TREE_NO_WARNING (var) = 1;
3765 declare_inline_vars (id->block, var);
3767 /* Build the use expr. If the return type of the function was
3768 promoted, convert it back to the expected type. */
3769 use = var;
3770 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3772 /* If we can match up types by promotion/demotion do so. */
3773 if (fold_convertible_p (caller_type, var))
3774 use = fold_convert (caller_type, var);
3775 else
3777 /* ??? For valid programs we should not end up here.
3778 Still if we end up with truly mismatched types here, fall back
3779 to using a MEM_REF to not leak invalid GIMPLE to the following
3780 passes. */
3781 /* Prevent var from being written into SSA form. */
3782 if (is_gimple_reg_type (TREE_TYPE (var)))
3783 DECL_NOT_GIMPLE_REG_P (var) = true;
3784 use = fold_build2 (MEM_REF, caller_type,
3785 build_fold_addr_expr (var),
3786 build_int_cst (ptr_type_node, 0));
3790 STRIP_USELESS_TYPE_CONVERSION (use);
3792 if (DECL_BY_REFERENCE (result))
3794 TREE_ADDRESSABLE (var) = 1;
3795 var = build_fold_addr_expr (var);
3798 done:
3799 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3800 way, when the RESULT_DECL is encountered, it will be
3801 automatically replaced by the VAR_DECL.
3803 When returning by reference, ensure that RESULT_DECL remaps to
3804 gimple_val. */
3805 if (DECL_BY_REFERENCE (result)
3806 && !is_gimple_val (var))
3808 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3809 insert_decl_map (id, result, temp);
3810 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3811 its default_def SSA_NAME. */
3812 if (gimple_in_ssa_p (id->src_cfun)
3813 && is_gimple_reg (result))
3815 temp = make_ssa_name (temp);
3816 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3818 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3820 else
3821 insert_decl_map (id, result, var);
3823 /* Remember this so we can ignore it in remap_decls. */
3824 id->retvar = var;
3825 return use;
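/* Illustrative sketch (not part of the original source): for
   "x = bar ()" where bar returns int, a local such as retval.3
   (name hypothetical) is created and mapped to bar's RESULT_DECL, so
   the copied "return e;" turns into "retval.3 = e;"; the returned USE
   later replaces the call itself as "x = retval.3;".  When MODIFY_DEST
   can safely be reused, "x" itself plays that role and no temporary is
   needed.  */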
3828 /* Determine if the function can be copied. If so return NULL. If
3829 not return a string describing the reason for failure. */
3831 const char *
3832 copy_forbidden (struct function *fun)
3834 const char *reason = fun->cannot_be_copied_reason;
3836 /* Only examine the function once. */
3837 if (fun->cannot_be_copied_set)
3838 return reason;
3840 /* We cannot copy a function that receives a non-local goto
3841 because we cannot remap the destination label used in the
3842 function that is performing the non-local goto. */
3843 /* ??? Actually, this should be possible, if we work at it.
3844 No doubt there's just a handful of places that simply
3845 assume it doesn't happen and don't substitute properly. */
3846 if (fun->has_nonlocal_label)
3848 reason = G_("function %q+F can never be copied "
3849 "because it receives a non-local goto");
3850 goto fail;
3853 if (fun->has_forced_label_in_static)
3855 reason = G_("function %q+F can never be copied because it saves "
3856 "address of local label in a static variable");
3857 goto fail;
3860 fail:
3861 fun->cannot_be_copied_reason = reason;
3862 fun->cannot_be_copied_set = true;
3863 return reason;
3867 static const char *inline_forbidden_reason;
3869 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3870 iff a function cannot be inlined. Also sets the reason why. */
3872 static tree
3873 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3874 struct walk_stmt_info *wip)
3876 tree fn = (tree) wip->info;
3877 tree t;
3878 gimple *stmt = gsi_stmt (*gsi);
3880 switch (gimple_code (stmt))
3882 case GIMPLE_CALL:
3883 /* Refuse to inline an alloca call unless the user explicitly forced it,
3884 as this may change the program's memory overhead drastically when the
3885 function using alloca is called in a loop. In the GCC present in
3886 SPEC2000, inlining into schedule_block caused it to require 2GB of
3887 RAM instead of 256MB. Don't do so for alloca calls emitted for
3888 VLA objects, as those can't cause unbounded growth (they're always
3889 wrapped inside stack_save/stack_restore regions). */
3890 if (gimple_maybe_alloca_call_p (stmt)
3891 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3892 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3894 inline_forbidden_reason
3895 = G_("function %q+F can never be inlined because it uses "
3896 "alloca (override using the always_inline attribute)");
3897 *handled_ops_p = true;
3898 return fn;
3901 t = gimple_call_fndecl (stmt);
3902 if (t == NULL_TREE)
3903 break;
3905 /* We cannot inline functions that call setjmp. */
3906 if (setjmp_call_p (t))
3908 inline_forbidden_reason
3909 = G_("function %q+F can never be inlined because it uses setjmp");
3910 *handled_ops_p = true;
3911 return t;
3914 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3915 switch (DECL_FUNCTION_CODE (t))
3917 /* We cannot inline functions that take a variable number of
3918 arguments. */
3919 case BUILT_IN_VA_START:
3920 case BUILT_IN_NEXT_ARG:
3921 case BUILT_IN_VA_END:
3922 inline_forbidden_reason
3923 = G_("function %q+F can never be inlined because it "
3924 "uses variable argument lists");
3925 *handled_ops_p = true;
3926 return t;
3928 case BUILT_IN_LONGJMP:
3929 /* We can't inline functions that call __builtin_longjmp at
3930 all. The non-local goto machinery really requires the
3931 destination be in a different function. If we allow the
3932 function calling __builtin_longjmp to be inlined into the
3933 function calling __builtin_setjmp, Things will Go Awry. */
3934 inline_forbidden_reason
3935 = G_("function %q+F can never be inlined because "
3936 "it uses setjmp-longjmp exception handling");
3937 *handled_ops_p = true;
3938 return t;
3940 case BUILT_IN_NONLOCAL_GOTO:
3941 /* Similarly. */
3942 inline_forbidden_reason
3943 = G_("function %q+F can never be inlined because "
3944 "it uses non-local goto");
3945 *handled_ops_p = true;
3946 return t;
3948 case BUILT_IN_RETURN:
3949 case BUILT_IN_APPLY_ARGS:
3950 /* If a __builtin_apply_args caller would be inlined,
3951 it would be saving arguments of the function it has
3952 been inlined into. Similarly __builtin_return would
3953 return from the function the call has been inlined into. */
3954 inline_forbidden_reason
3955 = G_("function %q+F can never be inlined because "
3956 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3957 *handled_ops_p = true;
3958 return t;
3960 default:
3961 break;
3963 break;
3965 case GIMPLE_GOTO:
3966 t = gimple_goto_dest (stmt);
3968 /* We will not inline a function which uses computed goto. The
3969 addresses of its local labels, which may be tucked into
3970 global storage, are of course not constant across
3971 instantiations, which causes unexpected behavior. */
3972 if (TREE_CODE (t) != LABEL_DECL)
3974 inline_forbidden_reason
3975 = G_("function %q+F can never be inlined "
3976 "because it contains a computed goto");
3977 *handled_ops_p = true;
3978 return t;
3980 break;
3982 default:
3983 break;
3986 *handled_ops_p = false;
3987 return NULL_TREE;
3990 /* Return true if FNDECL is a function that cannot be inlined into
3991 another one. */
3993 static bool
3994 inline_forbidden_p (tree fndecl)
3996 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3997 struct walk_stmt_info wi;
3998 basic_block bb;
3999 bool forbidden_p = false;
4001 /* First check for shared reasons not to copy the code. */
4002 inline_forbidden_reason = copy_forbidden (fun);
4003 if (inline_forbidden_reason != NULL)
4004 return true;
4006 /* Next, walk the statements of the function looking for
4007 constructs we can't handle, or that are non-optimal for inlining. */
4008 hash_set<tree> visited_nodes;
4009 memset (&wi, 0, sizeof (wi));
4010 wi.info = (void *) fndecl;
4011 wi.pset = &visited_nodes;
4013 FOR_EACH_BB_FN (bb, fun)
4015 gimple *ret;
4016 gimple_seq seq = bb_seq (bb);
4017 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
4018 forbidden_p = (ret != NULL);
4019 if (forbidden_p)
4020 break;
4023 return forbidden_p;
4026 /* Return false if the function FNDECL cannot be inlined on account of its
4027 attributes, true otherwise. */
4028 static bool
4029 function_attribute_inlinable_p (const_tree fndecl)
4031 if (targetm.attribute_table)
4033 const_tree a;
4035 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
4037 const_tree name = get_attribute_name (a);
4038 int i;
4040 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
4041 if (is_attribute_p (targetm.attribute_table[i].name, name))
4042 return targetm.function_attribute_inlinable_p (fndecl);
4046 return true;
4049 /* Returns nonzero if FN is a function that does not have any
4050 fundamental inline blocking properties. */
4052 bool
4053 tree_inlinable_function_p (tree fn)
4055 bool inlinable = true;
4056 bool do_warning;
4057 tree always_inline;
4059 /* If we've already decided this function shouldn't be inlined,
4060 there's no need to check again. */
4061 if (DECL_UNINLINABLE (fn))
4062 return false;
4064 /* We only warn for functions declared `inline' by the user. */
4065 do_warning = (opt_for_fn (fn, warn_inline)
4066 && DECL_DECLARED_INLINE_P (fn)
4067 && !DECL_NO_INLINE_WARNING_P (fn)
4068 && !DECL_IN_SYSTEM_HEADER (fn));
4070 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4072 if (flag_no_inline
4073 && always_inline == NULL)
4075 if (do_warning)
4076 warning (OPT_Winline, "function %q+F can never be inlined because it "
4077 "is suppressed using %<-fno-inline%>", fn);
4078 inlinable = false;
4081 else if (!function_attribute_inlinable_p (fn))
4083 if (do_warning)
4084 warning (OPT_Winline, "function %q+F can never be inlined because it "
4085 "uses attributes conflicting with inlining", fn);
4086 inlinable = false;
4089 else if (inline_forbidden_p (fn))
4091 /* See if we should warn about uninlinable functions. Previously,
4092 some of these warnings would be issued while trying to expand
4093 the function inline, but that would cause multiple warnings
4094 about functions that would for example call alloca. But since
4095 this is a property of the function, just one warning is enough.
4096 As a bonus we can now give more details about the reason why a
4097 function is not inlinable. */
4098 if (always_inline)
4099 error (inline_forbidden_reason, fn);
4100 else if (do_warning)
4101 warning (OPT_Winline, inline_forbidden_reason, fn);
4103 inlinable = false;
4106 /* Squirrel away the result so that we don't have to check again. */
4107 DECL_UNINLINABLE (fn) = !inlinable;
4109 return inlinable;
4112 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
4113 word size, take a possible memcpy call into account, and return the
4114 cost based on whether we optimize for size or speed according to SPEED_P. */
4117 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4119 HOST_WIDE_INT size;
4121 gcc_assert (!VOID_TYPE_P (type));
4123 if (TREE_CODE (type) == VECTOR_TYPE)
4125 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4126 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4127 int orig_mode_size
4128 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4129 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4130 return ((orig_mode_size + simd_mode_size - 1)
4131 / simd_mode_size);
4134 size = int_size_in_bytes (type);
4136 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4137 /* Cost of a memcpy call, 3 arguments and the call. */
4138 return 4;
4139 else
4140 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
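/* Worked example (illustrative; MOVE_MAX_PIECES and MOVE_RATIO are
   target dependent): with MOVE_MAX_PIECES == 8 and MOVE_RATIO == 4, a
   16-byte structure costs (16 + 8 - 1) / 8 == 2 piecewise moves, while
   a 64-byte structure exceeds 8 * 4 == 32 bytes and is costed as a
   memcpy call, i.e. 4.  */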
4143 /* Returns cost of operation CODE, according to WEIGHTS */
4145 static int
4146 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4147 tree op1 ATTRIBUTE_UNUSED, tree op2)
4149 switch (code)
4151 /* These are "free" conversions, or their presumed cost
4152 is folded into other operations. */
4153 case RANGE_EXPR:
4154 CASE_CONVERT:
4155 case COMPLEX_EXPR:
4156 case PAREN_EXPR:
4157 case VIEW_CONVERT_EXPR:
4158 return 0;
4160 /* Assign cost of 1 to usual operations.
4161 ??? We may consider mapping RTL costs to this. */
4162 case COND_EXPR:
4163 case VEC_COND_EXPR:
4164 case VEC_PERM_EXPR:
4166 case PLUS_EXPR:
4167 case POINTER_PLUS_EXPR:
4168 case POINTER_DIFF_EXPR:
4169 case MINUS_EXPR:
4170 case MULT_EXPR:
4171 case MULT_HIGHPART_EXPR:
4173 case ADDR_SPACE_CONVERT_EXPR:
4174 case FIXED_CONVERT_EXPR:
4175 case FIX_TRUNC_EXPR:
4177 case NEGATE_EXPR:
4178 case FLOAT_EXPR:
4179 case MIN_EXPR:
4180 case MAX_EXPR:
4181 case ABS_EXPR:
4182 case ABSU_EXPR:
4184 case LSHIFT_EXPR:
4185 case RSHIFT_EXPR:
4186 case LROTATE_EXPR:
4187 case RROTATE_EXPR:
4189 case BIT_IOR_EXPR:
4190 case BIT_XOR_EXPR:
4191 case BIT_AND_EXPR:
4192 case BIT_NOT_EXPR:
4194 case TRUTH_ANDIF_EXPR:
4195 case TRUTH_ORIF_EXPR:
4196 case TRUTH_AND_EXPR:
4197 case TRUTH_OR_EXPR:
4198 case TRUTH_XOR_EXPR:
4199 case TRUTH_NOT_EXPR:
4201 case LT_EXPR:
4202 case LE_EXPR:
4203 case GT_EXPR:
4204 case GE_EXPR:
4205 case EQ_EXPR:
4206 case NE_EXPR:
4207 case ORDERED_EXPR:
4208 case UNORDERED_EXPR:
4210 case UNLT_EXPR:
4211 case UNLE_EXPR:
4212 case UNGT_EXPR:
4213 case UNGE_EXPR:
4214 case UNEQ_EXPR:
4215 case LTGT_EXPR:
4217 case CONJ_EXPR:
4219 case PREDECREMENT_EXPR:
4220 case PREINCREMENT_EXPR:
4221 case POSTDECREMENT_EXPR:
4222 case POSTINCREMENT_EXPR:
4224 case REALIGN_LOAD_EXPR:
4226 case WIDEN_SUM_EXPR:
4227 case WIDEN_MULT_EXPR:
4228 case DOT_PROD_EXPR:
4229 case SAD_EXPR:
4230 case WIDEN_MULT_PLUS_EXPR:
4231 case WIDEN_MULT_MINUS_EXPR:
4232 case WIDEN_LSHIFT_EXPR:
4234 case VEC_WIDEN_MULT_HI_EXPR:
4235 case VEC_WIDEN_MULT_LO_EXPR:
4236 case VEC_WIDEN_MULT_EVEN_EXPR:
4237 case VEC_WIDEN_MULT_ODD_EXPR:
4238 case VEC_UNPACK_HI_EXPR:
4239 case VEC_UNPACK_LO_EXPR:
4240 case VEC_UNPACK_FLOAT_HI_EXPR:
4241 case VEC_UNPACK_FLOAT_LO_EXPR:
4242 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4243 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4244 case VEC_PACK_TRUNC_EXPR:
4245 case VEC_PACK_SAT_EXPR:
4246 case VEC_PACK_FIX_TRUNC_EXPR:
4247 case VEC_PACK_FLOAT_EXPR:
4248 case VEC_WIDEN_LSHIFT_HI_EXPR:
4249 case VEC_WIDEN_LSHIFT_LO_EXPR:
4250 case VEC_DUPLICATE_EXPR:
4251 case VEC_SERIES_EXPR:
4253 return 1;
4255 /* A few special cases of expensive operations. This is useful
4256 to avoid inlining functions that have too many of these. */
4257 case TRUNC_DIV_EXPR:
4258 case CEIL_DIV_EXPR:
4259 case FLOOR_DIV_EXPR:
4260 case ROUND_DIV_EXPR:
4261 case EXACT_DIV_EXPR:
4262 case TRUNC_MOD_EXPR:
4263 case CEIL_MOD_EXPR:
4264 case FLOOR_MOD_EXPR:
4265 case ROUND_MOD_EXPR:
4266 case RDIV_EXPR:
4267 if (TREE_CODE (op2) != INTEGER_CST)
4268 return weights->div_mod_cost;
4269 return 1;
4271 /* Bit-field insertion needs several shift and mask operations. */
4272 case BIT_INSERT_EXPR:
4273 return 3;
4275 default:
4276 /* We expect a copy assignment with no operator. */
4277 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4278 return 0;
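/* Example (illustrative): for "a / 4" the divisor is an INTEGER_CST,
   so the division is assumed cheap and costs 1; for "a / b" it costs
   weights->div_mod_cost, which init_inline_once below sets to 1 for
   size estimates and 10 for time estimates.  */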
4283 /* Estimate number of instructions that will be created by expanding
4284 the statements in the statement sequence STMTS.
4285 WEIGHTS contains weights attributed to various constructs. */
4288 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4290 int cost;
4291 gimple_stmt_iterator gsi;
4293 cost = 0;
4294 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4295 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4297 return cost;
4301 /* Estimate number of instructions that will be created by expanding STMT.
4302 WEIGHTS contains weights attributed to various constructs. */
4305 estimate_num_insns (gimple *stmt, eni_weights *weights)
4307 unsigned cost, i;
4308 enum gimple_code code = gimple_code (stmt);
4309 tree lhs;
4310 tree rhs;
4312 switch (code)
4314 case GIMPLE_ASSIGN:
4315 /* Try to estimate the cost of assignments. We have two cases to
4316 deal with:
4317 1) Simple assignments to registers;
4318 2) Stores to things that must live in memory. This includes
4319 "normal" stores to scalars, but also assignments of large
4320 structures, or constructors of big arrays;
4322 Let us look at these two cases, assuming we have "a = b + C":
4323 <GIMPLE_ASSIGN <var_decl "a">
4324 <plus_expr <var_decl "b"> <constant C>>
4325 If "a" is a GIMPLE register, the assignment to it is free on almost
4326 any target, because "a" usually ends up in a real register. Hence
4327 the only cost of this expression comes from the PLUS_EXPR, and we
4328 can ignore the GIMPLE_ASSIGN.
4329 If "a" is not a GIMPLE register, the assignment to "a" will most
4330 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4331 of moving something into "a", which we compute using the function
4332 estimate_move_cost. */
4333 if (gimple_clobber_p (stmt))
4334 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4336 lhs = gimple_assign_lhs (stmt);
4337 rhs = gimple_assign_rhs1 (stmt);
4339 cost = 0;
4341 /* Account for the cost of moving to / from memory. */
4342 if (gimple_store_p (stmt))
4343 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4344 if (gimple_assign_load_p (stmt))
4345 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4347 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4348 gimple_assign_rhs1 (stmt),
4349 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4350 == GIMPLE_BINARY_RHS
4351 ? gimple_assign_rhs2 (stmt) : NULL);
4352 break;
4354 case GIMPLE_COND:
4355 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4356 gimple_op (stmt, 0),
4357 gimple_op (stmt, 1));
4358 break;
4360 case GIMPLE_SWITCH:
4362 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4363 /* Take into account cost of the switch + guess 2 conditional jumps for
4364 each case label.
4366 TODO: once the switch expansion logic is sufficiently separated, we can
4367 do a better job of estimating the cost of the switch. */
4368 if (weights->time_based)
4369 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4370 else
4371 cost = gimple_switch_num_labels (switch_stmt) * 2;
4373 break;
4375 case GIMPLE_CALL:
4377 tree decl;
4379 if (gimple_call_internal_p (stmt))
4380 return 0;
4381 else if ((decl = gimple_call_fndecl (stmt))
4382 && fndecl_built_in_p (decl))
4384 /* Do not special case builtins where we see the body.
4385 This just confuses the inliner. */
4386 struct cgraph_node *node;
4387 if (!(node = cgraph_node::get (decl))
4388 || node->definition)
4390 /* For builtins that are likely expanded to nothing or
4391 inlined, do not account for operand costs. */
4392 else if (is_simple_builtin (decl))
4393 return 0;
4394 else if (is_inexpensive_builtin (decl))
4395 return weights->target_builtin_call_cost;
4396 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4398 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4399 specialize the cheap expansion we do here.
4400 ??? This asks for a more general solution. */
4401 switch (DECL_FUNCTION_CODE (decl))
4403 case BUILT_IN_POW:
4404 case BUILT_IN_POWF:
4405 case BUILT_IN_POWL:
4406 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4407 && (real_equal
4408 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4409 &dconst2)))
4410 return estimate_operator_cost
4411 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4412 gimple_call_arg (stmt, 0));
4413 break;
4415 default:
4416 break;
4421 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4422 if (gimple_call_lhs (stmt))
4423 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4424 weights->time_based);
4425 for (i = 0; i < gimple_call_num_args (stmt); i++)
4427 tree arg = gimple_call_arg (stmt, i);
4428 cost += estimate_move_cost (TREE_TYPE (arg),
4429 weights->time_based);
4431 break;
4434 case GIMPLE_RETURN:
4435 return weights->return_cost;
4437 case GIMPLE_GOTO:
4438 case GIMPLE_LABEL:
4439 case GIMPLE_NOP:
4440 case GIMPLE_PHI:
4441 case GIMPLE_PREDICT:
4442 case GIMPLE_DEBUG:
4443 return 0;
4445 case GIMPLE_ASM:
4447 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4448 /* 1000 means infinity. This avoids overflows later
4449 with very long asm statements. */
4450 if (count > 1000)
4451 count = 1000;
4452 /* If this asm is asm inline, count anything as minimum size. */
4453 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4454 count = MIN (1, count);
4455 return MAX (1, count);
4458 case GIMPLE_RESX:
4459 /* This is either going to be an external function call with one
4460 argument, or two register copy statements plus a goto. */
4461 return 2;
4463 case GIMPLE_EH_DISPATCH:
4464 /* ??? This is going to turn into a switch statement. Ideally
4465 we'd have a look at the eh region and estimate the number of
4466 edges involved. */
4467 return 10;
4469 case GIMPLE_BIND:
4470 return estimate_num_insns_seq (
4471 gimple_bind_body (as_a <gbind *> (stmt)),
4472 weights);
4474 case GIMPLE_EH_FILTER:
4475 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4477 case GIMPLE_CATCH:
4478 return estimate_num_insns_seq (gimple_catch_handler (
4479 as_a <gcatch *> (stmt)),
4480 weights);
4482 case GIMPLE_TRY:
4483 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4484 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4486 /* OMP directives are generally very expensive. */
4488 case GIMPLE_OMP_RETURN:
4489 case GIMPLE_OMP_SECTIONS_SWITCH:
4490 case GIMPLE_OMP_ATOMIC_STORE:
4491 case GIMPLE_OMP_CONTINUE:
4492 /* ...except these, which are cheap. */
4493 return 0;
4495 case GIMPLE_OMP_ATOMIC_LOAD:
4496 return weights->omp_cost;
4498 case GIMPLE_OMP_FOR:
4499 return (weights->omp_cost
4500 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4501 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4503 case GIMPLE_OMP_PARALLEL:
4504 case GIMPLE_OMP_TASK:
4505 case GIMPLE_OMP_CRITICAL:
4506 case GIMPLE_OMP_MASTER:
4507 case GIMPLE_OMP_TASKGROUP:
4508 case GIMPLE_OMP_ORDERED:
4509 case GIMPLE_OMP_SCAN:
4510 case GIMPLE_OMP_SECTION:
4511 case GIMPLE_OMP_SECTIONS:
4512 case GIMPLE_OMP_SINGLE:
4513 case GIMPLE_OMP_TARGET:
4514 case GIMPLE_OMP_TEAMS:
4515 return (weights->omp_cost
4516 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4518 case GIMPLE_TRANSACTION:
4519 return (weights->tm_cost
4520 + estimate_num_insns_seq (gimple_transaction_body (
4521 as_a <gtransaction *> (stmt)),
4522 weights));
4524 default:
4525 gcc_unreachable ();
4528 return cost;
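/* Worked example (illustrative): for "x_1 = a_2 + b_3" with a register
   LHS there is no load or store, so the cost is just
   estimate_operator_cost (PLUS_EXPR, ...) == 1.  For a store such as
   "*p_4 = _5" the RHS is a plain copy (operator cost 0) and the cost
   is the estimate_move_cost of the stored type.  */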
4531 /* Estimate number of instructions that will be created by expanding
4532 function FNDECL. WEIGHTS contains weights attributed to various
4533 constructs. */
4536 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4538 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4539 gimple_stmt_iterator bsi;
4540 basic_block bb;
4541 int n = 0;
4543 gcc_assert (my_function && my_function->cfg);
4544 FOR_EACH_BB_FN (bb, my_function)
4546 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4547 n += estimate_num_insns (gsi_stmt (bsi), weights);
4550 return n;
4554 /* Initializes weights used by estimate_num_insns. */
4556 void
4557 init_inline_once (void)
4559 eni_size_weights.call_cost = 1;
4560 eni_size_weights.indirect_call_cost = 3;
4561 eni_size_weights.target_builtin_call_cost = 1;
4562 eni_size_weights.div_mod_cost = 1;
4563 eni_size_weights.omp_cost = 40;
4564 eni_size_weights.tm_cost = 10;
4565 eni_size_weights.time_based = false;
4566 eni_size_weights.return_cost = 1;
4568 /* Estimating the time for a call is difficult, since we have no idea what the
4569 called function does. In the current uses of eni_time_weights,
4570 underestimating the cost does less harm than overestimating it, so
4571 we choose a rather small value here. */
4572 eni_time_weights.call_cost = 10;
4573 eni_time_weights.indirect_call_cost = 15;
4574 eni_time_weights.target_builtin_call_cost = 1;
4575 eni_time_weights.div_mod_cost = 10;
4576 eni_time_weights.omp_cost = 40;
4577 eni_time_weights.tm_cost = 40;
4578 eni_time_weights.time_based = true;
4579 eni_time_weights.return_cost = 2;
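/* For instance, with the weights above a direct call counts as 1
   "instruction" when estimating size (eni_size_weights.call_cost) but
   as 10 when estimating time (eni_time_weights.call_cost); likewise a
   division by a non-constant counts as 1 vs. 10.  */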
4583 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4585 static void
4586 prepend_lexical_block (tree current_block, tree new_block)
4588 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4589 BLOCK_SUBBLOCKS (current_block) = new_block;
4590 BLOCK_SUPERCONTEXT (new_block) = current_block;
4593 /* Add local variables from CALLEE to CALLER. */
4595 static inline void
4596 add_local_variables (struct function *callee, struct function *caller,
4597 copy_body_data *id)
4599 tree var;
4600 unsigned ix;
4602 FOR_EACH_LOCAL_DECL (callee, ix, var)
4603 if (!can_be_nonlocal (var, id))
4605 tree new_var = remap_decl (var, id);
4607 /* Remap debug-expressions. */
4608 if (VAR_P (new_var)
4609 && DECL_HAS_DEBUG_EXPR_P (var)
4610 && new_var != var)
4612 tree tem = DECL_DEBUG_EXPR (var);
4613 bool old_regimplify = id->regimplify;
4614 id->remapping_type_depth++;
4615 walk_tree (&tem, copy_tree_body_r, id, NULL);
4616 id->remapping_type_depth--;
4617 id->regimplify = old_regimplify;
4618 SET_DECL_DEBUG_EXPR (new_var, tem);
4619 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4621 add_local_decl (caller, new_var);
4625 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4626 have brought in or introduced any debug stmts for SRCVAR. */
4628 static inline void
4629 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4631 tree *remappedvarp = id->decl_map->get (srcvar);
4633 if (!remappedvarp)
4634 return;
4636 if (!VAR_P (*remappedvarp))
4637 return;
4639 if (*remappedvarp == id->retvar)
4640 return;
4642 tree tvar = target_for_debug_bind (*remappedvarp);
4643 if (!tvar)
4644 return;
4646 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4647 id->call_stmt);
4648 gimple_seq_add_stmt (bindings, stmt);
4651 /* For each inlined variable for which we may have debug bind stmts,
4652 add before GSI a final debug stmt resetting it, marking the end of
4653 its life, so that var-tracking knows it doesn't have to compute
4654 further locations for it. */
4656 static inline void
4657 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4659 tree var;
4660 unsigned ix;
4661 gimple_seq bindings = NULL;
4663 if (!gimple_in_ssa_p (id->src_cfun))
4664 return;
4666 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4667 return;
4669 for (var = DECL_ARGUMENTS (id->src_fn);
4670 var; var = DECL_CHAIN (var))
4671 reset_debug_binding (id, var, &bindings);
4673 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4674 reset_debug_binding (id, var, &bindings);
4676 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
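/* Illustrative sketch (not part of the original source): if the
   inlined body bound a user variable "i", a statement of the form

     # DEBUG i => NULL

   is added before GSI, so var-tracking does not try to compute
   locations for "i" past the end of the inlined sequence.  */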
4679 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4681 static bool
4682 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4683 bitmap to_purge)
4685 tree use_retvar;
4686 tree fn;
4687 hash_map<tree, tree> *dst;
4688 hash_map<tree, tree> *st = NULL;
4689 tree return_slot;
4690 tree modify_dest;
4691 struct cgraph_edge *cg_edge;
4692 cgraph_inline_failed_t reason;
4693 basic_block return_block;
4694 edge e;
4695 gimple_stmt_iterator gsi, stmt_gsi;
4696 bool successfully_inlined = false;
4697 bool purge_dead_abnormal_edges;
4698 gcall *call_stmt;
4699 unsigned int prop_mask, src_properties;
4700 struct function *dst_cfun;
4701 tree simduid;
4702 use_operand_p use;
4703 gimple *simtenter_stmt = NULL;
4704 vec<tree> *simtvars_save;
4706 /* The gimplifier uses input_location in too many places, such as
4707 internal_get_tmp_var (). */
4708 location_t saved_location = input_location;
4709 input_location = gimple_location (stmt);
4711 /* From here on, we're only interested in CALL_EXPRs. */
4712 call_stmt = dyn_cast <gcall *> (stmt);
4713 if (!call_stmt)
4714 goto egress;
4716 cg_edge = id->dst_node->get_edge (stmt);
4717 gcc_checking_assert (cg_edge);
4718 /* First, see if we can figure out what function is being called.
4719 If we cannot, then there is no hope of inlining the function. */
4720 if (cg_edge->indirect_unknown_callee)
4721 goto egress;
4722 fn = cg_edge->callee->decl;
4723 gcc_checking_assert (fn);
4725 /* If FN is a declaration of a function in a nested scope that was
4726 globally declared inline, we don't set its DECL_INITIAL.
4727 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4728 C++ front-end uses it for cdtors to refer to their internal
4729 declarations, that are not real functions. Fortunately those
4730 don't have trees to be saved, so we can tell by checking their
4731 gimple_body. */
4732 if (!DECL_INITIAL (fn)
4733 && DECL_ABSTRACT_ORIGIN (fn)
4734 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4735 fn = DECL_ABSTRACT_ORIGIN (fn);
4737 /* Don't try to inline functions that are not well-suited to inlining. */
4738 if (cg_edge->inline_failed)
4740 reason = cg_edge->inline_failed;
4741 /* If this call was originally indirect, we do not want to emit any
4742 inlining related warnings or sorry messages because there are no
4743 guarantees regarding those. */
4744 if (cg_edge->indirect_inlining_edge)
4745 goto egress;
4747 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4748 /* For extern inline functions that get redefined we always
4749 silently ignore the always_inline flag. Better behavior would
4750 be to be able to keep both bodies and use the extern inline body
4751 for inlining, but we can't do that because frontends overwrite
4752 the body. */
4753 && !cg_edge->callee->redefined_extern_inline
4754 /* During early inline pass, report only when optimization is
4755 not turned on. */
4756 && (symtab->global_info_ready
4757 || !optimize
4758 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4759 /* PR 20090218-1_0.c. Body can be provided by another module. */
4760 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4762 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4763 cgraph_inline_failed_string (reason));
4764 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4765 inform (gimple_location (stmt), "called from here");
4766 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4767 inform (DECL_SOURCE_LOCATION (cfun->decl),
4768 "called from this function");
4770 else if (opt_for_fn (fn, warn_inline)
4771 && DECL_DECLARED_INLINE_P (fn)
4772 && !DECL_NO_INLINE_WARNING_P (fn)
4773 && !DECL_IN_SYSTEM_HEADER (fn)
4774 && reason != CIF_UNSPECIFIED
4775 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4776 /* Do not warn about not inlined recursive calls. */
4777 && !cg_edge->recursive_p ()
4778 /* Avoid warnings during early inline pass. */
4779 && symtab->global_info_ready)
4781 auto_diagnostic_group d;
4782 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4783 fn, _(cgraph_inline_failed_string (reason))))
4785 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4786 inform (gimple_location (stmt), "called from here");
4787 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4788 inform (DECL_SOURCE_LOCATION (cfun->decl),
4789 "called from this function");
4792 goto egress;
4794 id->src_node = cg_edge->callee;
4796 /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4797 and redirect to the function being thunked. */
4798 if (id->src_node->thunk)
4800 cgraph_edge *edge;
4801 tree virtual_offset = NULL;
4802 profile_count count = cg_edge->count;
4803 tree op;
4804 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4805 thunk_info *info = thunk_info::get (id->src_node);
4807 cgraph_edge::remove (cg_edge);
4808 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4809 gimple_uid (stmt),
4810 profile_count::one (),
4811 profile_count::one (),
4812 true);
4813 edge->count = count;
4814 if (info->virtual_offset_p)
4815 virtual_offset = size_int (info->virtual_value);
4816 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4817 NULL);
4818 gsi_insert_before (&iter, gimple_build_assign (op,
4819 gimple_call_arg (stmt, 0)),
4820 GSI_NEW_STMT);
4821 gcc_assert (info->this_adjusting);
4822 op = thunk_adjust (&iter, op, 1, info->fixed_offset,
4823 virtual_offset, info->indirect_offset);
4825 gimple_call_set_arg (stmt, 0, op);
4826 gimple_call_set_fndecl (stmt, edge->callee->decl);
4827 update_stmt (stmt);
4828 id->src_node->remove ();
4829 expand_call_inline (bb, stmt, id, to_purge);
4830 maybe_remove_unused_call_args (cfun, stmt);
4831 return true;
4833 fn = cg_edge->callee->decl;
4834 cg_edge->callee->get_untransformed_body ();
4836 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4837 cg_edge->callee->verify ();
4839 /* We will be inlining this callee. */
4840 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4842 /* Update the callers EH personality. */
4843 if (DECL_FUNCTION_PERSONALITY (fn))
4844 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4845 = DECL_FUNCTION_PERSONALITY (fn);
4847 /* Split the block before the GIMPLE_CALL. */
4848 stmt_gsi = gsi_for_stmt (stmt);
4849 gsi_prev (&stmt_gsi);
4850 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4851 bb = e->src;
4852 return_block = e->dest;
4853 remove_edge (e);
4855 /* If the GIMPLE_CALL was the last statement of BB, it may have
4856 been the source of abnormal edges. In this case, schedule
4857 the removal of dead abnormal edges. */
4858 gsi = gsi_start_bb (return_block);
4859 gsi_next (&gsi);
4860 purge_dead_abnormal_edges = gsi_end_p (gsi);
4862 stmt_gsi = gsi_start_bb (return_block);
4864 /* Build a block containing code to initialize the arguments, the
4865 actual inline expansion of the body, and a label for the return
4866 statements within the function to jump to. The type of the
4867 statement expression is the return type of the function call.
4868 ??? If the call does not have an associated block then we will
4869 remap all callee blocks to NULL, effectively dropping most of
4870 its debug information. This should only happen for calls to
4871 artificial decls inserted by the compiler itself. We need to
4872 either link the inlined blocks into the caller block tree or
4873 not refer to them in any way to not break GC for locations. */
4874 if (tree block = gimple_block (stmt))
4876 /* We do want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION,
4877 so that inlined_function_outer_scope_p returns true on this BLOCK. */
4878 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4879 if (loc == UNKNOWN_LOCATION)
4880 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4881 if (loc == UNKNOWN_LOCATION)
4882 loc = BUILTINS_LOCATION;
4883 id->block = make_node (BLOCK);
4884 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4885 BLOCK_SOURCE_LOCATION (id->block) = loc;
4886 prepend_lexical_block (block, id->block);
4889 /* Local declarations will be replaced by their equivalents in this map. */
4890 st = id->decl_map;
4891 id->decl_map = new hash_map<tree, tree>;
4892 dst = id->debug_map;
4893 id->debug_map = NULL;
4894 if (flag_stack_reuse != SR_NONE)
4895 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4897 /* Record the function we are about to inline. */
4898 id->src_fn = fn;
4899 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4900 id->reset_location = DECL_IGNORED_P (fn);
4901 id->call_stmt = call_stmt;
4903 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4904 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4905 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4906 simtvars_save = id->dst_simt_vars;
4907 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4908 && (simduid = bb->loop_father->simduid) != NULL_TREE
4909 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4910 && single_imm_use (simduid, &use, &simtenter_stmt)
4911 && is_gimple_call (simtenter_stmt)
4912 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4913 vec_alloc (id->dst_simt_vars, 0);
4914 else
4915 id->dst_simt_vars = NULL;
4917 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4918 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4920 /* If the src function contains an IFN_VA_ARG, then so will the dst
4921 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4922 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4923 src_properties = id->src_cfun->curr_properties & prop_mask;
4924 if (src_properties != prop_mask)
4925 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4926 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4927 id->dst_node->calls_declare_variant_alt
4928 |= id->src_node->calls_declare_variant_alt;
4930 gcc_assert (!id->src_cfun->after_inlining);
4932 id->entry_bb = bb;
4933 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4935 gimple_stmt_iterator si = gsi_last_bb (bb);
4936 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4937 NOT_TAKEN),
4938 GSI_NEW_STMT);
4940 initialize_inlined_parameters (id, stmt, fn, bb);
4941 if (debug_nonbind_markers_p && debug_inline_points && id->block
4942 && inlined_function_outer_scope_p (id->block))
4944 gimple_stmt_iterator si = gsi_last_bb (bb);
4945 gsi_insert_after (&si, gimple_build_debug_inline_entry
4946 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4947 GSI_NEW_STMT);
4950 if (DECL_INITIAL (fn))
4952 if (gimple_block (stmt))
4954 tree *var;
4956 prepend_lexical_block (id->block,
4957 remap_blocks (DECL_INITIAL (fn), id));
4958 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4959 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4960 == NULL_TREE));
4961 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block,
4962 otherwise for DWARF the DW_TAG_formal_parameter entries will not be
4963 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4964 under it. The parameters can then be evaluated in the debugger,
4965 but don't show in backtraces. */
4966 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4967 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4969 tree v = *var;
4970 *var = TREE_CHAIN (v);
4971 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4972 BLOCK_VARS (id->block) = v;
4974 else
4975 var = &TREE_CHAIN (*var);
4977 else
4978 remap_blocks_to_null (DECL_INITIAL (fn), id);
4981 /* Return statements in the function body will be replaced by jumps
4982 to the RET_LABEL. */
4983 gcc_assert (DECL_INITIAL (fn));
4984 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4986 /* Find the LHS to which the result of this call is assigned. */
4987 return_slot = NULL;
4988 if (gimple_call_lhs (stmt))
4990 modify_dest = gimple_call_lhs (stmt);
4992 /* The function which we are inlining might not return a value,
4993 in which case we should issue a warning that the function
4994 does not return a value. In that case the optimizers will
4995 see that the variable to which the value is assigned was not
4996 initialized. We do not want to issue a warning about that
4997 uninitialized variable. */
4998 if (DECL_P (modify_dest))
4999 TREE_NO_WARNING (modify_dest) = 1;
5001 if (gimple_call_return_slot_opt_p (call_stmt))
5003 return_slot = modify_dest;
5004 modify_dest = NULL;
5007 else
5008 modify_dest = NULL;
5010 /* If we are inlining a call to the C++ operator new, we don't want
5011 to use type based alias analysis on the return value. Otherwise
5012 we may get confused if the compiler sees that the inlined new
5013 function returns a pointer which was just deleted. See bug
5014 33407. */
5015 if (DECL_IS_OPERATOR_NEW_P (fn))
5017 return_slot = NULL;
5018 modify_dest = NULL;
5021 /* Declare the return variable for the function. */
5022 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
5024 /* Add local vars in this inlined callee to caller. */
5025 add_local_variables (id->src_cfun, cfun, id);
5027 if (id->src_node->clone.performed_splits)
5029 /* Any calls from the inlined function will be turned into calls from the
5030 function we inline into. We must preserve the notes about how to split
5031 parameters so that such calls can be redirected/updated. */
5032 unsigned len = vec_safe_length (id->src_node->clone.performed_splits);
5033 for (unsigned i = 0; i < len; i++)
5035 ipa_param_performed_split ps
5036 = (*id->src_node->clone.performed_splits)[i];
5037 ps.dummy_decl = remap_decl (ps.dummy_decl, id);
5038 vec_safe_push (id->dst_node->clone.performed_splits, ps);
5041 if (flag_checking)
5043 len = vec_safe_length (id->dst_node->clone.performed_splits);
5044 for (unsigned i = 0; i < len; i++)
5046 ipa_param_performed_split *ps1
5047 = &(*id->dst_node->clone.performed_splits)[i];
5048 for (unsigned j = i + 1; j < len; j++)
5050 ipa_param_performed_split *ps2
5051 = &(*id->dst_node->clone.performed_splits)[j];
5052 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
5053 || ps1->unit_offset != ps2->unit_offset);
5059 if (dump_enabled_p ())
5061 char buf[128];
5062 snprintf (buf, sizeof(buf), "%4.2f",
5063 cg_edge->sreal_frequency ().to_double ());
5064 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5065 call_stmt,
5066 "Inlining %C to %C with frequency %s\n",
5067 id->src_node, id->dst_node, buf);
5068 if (dump_file && (dump_flags & TDF_DETAILS))
5070 id->src_node->dump (dump_file);
5071 id->dst_node->dump (dump_file);
5075 /* This is it. Duplicate the callee body. Assume callee is
5076 pre-gimplified. Note that we must not alter the caller
5077 function in any way before this point, as this CALL_EXPR may be
5078 a self-referential call; if we're calling ourselves, we need to
5079 duplicate our body before altering anything. */
5080 copy_body (id, bb, return_block, NULL);
5082 reset_debug_bindings (id, stmt_gsi);
5084 if (flag_stack_reuse != SR_NONE)
5085 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5086 if (!TREE_THIS_VOLATILE (p))
5088 tree *varp = id->decl_map->get (p);
5089 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
5091 tree clobber = build_clobber (TREE_TYPE (*varp));
5092 gimple *clobber_stmt;
5093 clobber_stmt = gimple_build_assign (*varp, clobber);
5094 gimple_set_location (clobber_stmt, gimple_location (stmt));
5095 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5099 /* Reset the escaped solution. */
5100 if (cfun->gimple_df)
5101 pt_solution_reset (&cfun->gimple_df->escaped);
5103 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
5104 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5106 size_t nargs = gimple_call_num_args (simtenter_stmt);
5107 vec<tree> *vars = id->dst_simt_vars;
5108 auto_vec<tree> newargs (nargs + vars->length ());
5109 for (size_t i = 0; i < nargs; i++)
5110 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5111 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5113 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5114 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5116 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5117 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5118 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5119 gsi_replace (&gsi, g, false);
5121 vec_free (id->dst_simt_vars);
5122 id->dst_simt_vars = simtvars_save;
5124 /* Clean up. */
5125 if (id->debug_map)
5127 delete id->debug_map;
5128 id->debug_map = dst;
5130 delete id->decl_map;
5131 id->decl_map = st;
5133 /* Unlink the call's virtual operands before replacing it. */
5134 unlink_stmt_vdef (stmt);
5135 if (gimple_vdef (stmt)
5136 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5137 release_ssa_name (gimple_vdef (stmt));
5139 /* If the inlined function returns a result that we care about,
5140 substitute the GIMPLE_CALL with an assignment of the return
5141 variable to the LHS of the call. That is, if STMT was
5142 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
5143 if (use_retvar && gimple_call_lhs (stmt))
5145 gimple *old_stmt = stmt;
5146 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5147 gimple_set_location (stmt, gimple_location (old_stmt));
5148 gsi_replace (&stmt_gsi, stmt, false);
5149 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5150 /* Append a clobber for id->retvar if easily possible. */
5151 if (flag_stack_reuse != SR_NONE
5152 && id->retvar
5153 && VAR_P (id->retvar)
5154 && id->retvar != return_slot
5155 && id->retvar != modify_dest
5156 && !TREE_THIS_VOLATILE (id->retvar)
5157 && !is_gimple_reg (id->retvar)
5158 && !stmt_ends_bb_p (stmt))
5160 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5161 gimple *clobber_stmt;
5162 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5163 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5164 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5167 else
5169 /* Handle the case of inlining a function with no return
5170 statement, which causes the return value to become undefined. */
5171 if (gimple_call_lhs (stmt)
5172 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5174 tree name = gimple_call_lhs (stmt);
5175 tree var = SSA_NAME_VAR (name);
5176 tree def = var ? ssa_default_def (cfun, var) : NULL;
5178 if (def)
5180 /* If the variable is used undefined, make this name
5181 undefined via a move. */
5182 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5183 gsi_replace (&stmt_gsi, stmt, true);
5185 else
5187 if (!var)
5189 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5190 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5192 /* Otherwise make this variable undefined. */
5193 gsi_remove (&stmt_gsi, true);
5194 set_ssa_default_def (cfun, var, name);
5195 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5198 /* Replace with a clobber for id->retvar. */
5199 else if (flag_stack_reuse != SR_NONE
5200 && id->retvar
5201 && VAR_P (id->retvar)
5202 && id->retvar != return_slot
5203 && id->retvar != modify_dest
5204 && !TREE_THIS_VOLATILE (id->retvar)
5205 && !is_gimple_reg (id->retvar))
5207 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5208 gimple *clobber_stmt;
5209 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5210 gimple_set_location (clobber_stmt, gimple_location (stmt));
5211 gsi_replace (&stmt_gsi, clobber_stmt, false);
5212 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5214 else
5215 gsi_remove (&stmt_gsi, true);
5218 if (purge_dead_abnormal_edges)
5219 bitmap_set_bit (to_purge, return_block->index);
5221 /* If the value of the new expression is ignored, that's OK. We
5222 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5223 the equivalent inlined version either. */
5224 if (is_gimple_assign (stmt))
5226 gcc_assert (gimple_assign_single_p (stmt)
5227 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5228 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5231 id->add_clobbers_to_eh_landing_pads = 0;
5233 /* Output the inlining info for this abstract function, since it has been
5234 inlined. If we don't do this now, we can lose the information about the
5235 variables in the function when the blocks get blown away as soon as we
5236 remove the cgraph node. */
5237 if (gimple_block (stmt))
5238 (*debug_hooks->outlining_inline_function) (fn);
5240 /* Update callgraph if needed. */
5241 cg_edge->callee->remove ();
5243 id->block = NULL_TREE;
5244 id->retvar = NULL_TREE;
5245 successfully_inlined = true;
5247 egress:
5248 input_location = saved_location;
5249 return successfully_inlined;
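/* Illustrative before/after sketch (not part of the original source)
   for a call that is successfully inlined:

     before:  x_1 = foo (a_2);

     after:   p.5 = a_2;           <- initialize_inlined_parameters
              ... copied body of foo, assigning its result to retval.6 ...
              x_1 = retval.6;      <- the GIMPLE_CALL replaced via use_retvar

   The names p.5 and retval.6 are hypothetical temporaries.  */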
5252 /* Expand call statements reachable from STMT_P.
5253 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5254 in a MODIFY_EXPR. */
5256 static bool
5257 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5258 bitmap to_purge)
5260 gimple_stmt_iterator gsi;
5261 bool inlined = false;
5263 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5265 gimple *stmt = gsi_stmt (gsi);
5266 gsi_prev (&gsi);
5268 if (is_gimple_call (stmt)
5269 && !gimple_call_internal_p (stmt))
5270 inlined |= expand_call_inline (bb, stmt, id, to_purge);
5273 return inlined;
5277 /* Walk all basic blocks created after FIRST and try to fold every statement
5278 in the STATEMENTS pointer set. */
5280 static void
5281 fold_marked_statements (int first, hash_set<gimple *> *statements)
5283 auto_bitmap to_purge;
5285 auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
5286 auto_sbitmap visited (last_basic_block_for_fn (cfun));
5287 bitmap_clear (visited);
5289 stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5290 while (!stack.is_empty ())
5292 /* Look at the edge on the top of the stack. */
5293 edge e = stack.pop ();
5294 basic_block dest = e->dest;
5296 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
5297 || bitmap_bit_p (visited, dest->index))
5298 continue;
5300 bitmap_set_bit (visited, dest->index);
5302 if (dest->index >= first)
5303 for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
5304 !gsi_end_p (gsi); gsi_next (&gsi))
5306 if (!statements->contains (gsi_stmt (gsi)))
5307 continue;
5309 gimple *old_stmt = gsi_stmt (gsi);
5310 tree old_decl = (is_gimple_call (old_stmt)
5311 ? gimple_call_fndecl (old_stmt) : 0);
5312 if (old_decl && fndecl_built_in_p (old_decl))
5314 /* Folding builtins can create multiple instructions;
5315 we need to look at all of them. */
5316 gimple_stmt_iterator i2 = gsi;
5317 gsi_prev (&i2);
5318 if (fold_stmt (&gsi))
5320 gimple *new_stmt;
5321 /* If a builtin at the end of a bb folded into nothing,
5322 the following loop won't work. */
5323 if (gsi_end_p (gsi))
5325 cgraph_update_edges_for_call_stmt (old_stmt,
5326 old_decl, NULL);
5327 break;
5329 if (gsi_end_p (i2))
5330 i2 = gsi_start_bb (dest);
5331 else
5332 gsi_next (&i2);
5333 while (1)
5335 new_stmt = gsi_stmt (i2);
5336 update_stmt (new_stmt);
5337 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5338 new_stmt);
5340 if (new_stmt == gsi_stmt (gsi))
5342 /* It is okay to check only for the very last
5343 of these statements. If it is a throwing
5344 statement nothing will change. If it isn't,
5345 this can remove EH edges. That could only be
5346 wrong if some intermediate stmts could throw
5347 while the last one could not, and then we
5348 would have to split the block, which we can't
5349 do here and would lose anyway. And as builtins
5350 probably never throw, this is all
5351 moot anyway. */
5352 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5353 new_stmt))
5354 bitmap_set_bit (to_purge, dest->index);
5355 break;
5357 gsi_next (&i2);
5361 else if (fold_stmt (&gsi))
5363 /* Re-read the statement from GSI as fold_stmt() may
5364 have changed it. */
5365 gimple *new_stmt = gsi_stmt (gsi);
5366 update_stmt (new_stmt);
5368 if (is_gimple_call (old_stmt)
5369 || is_gimple_call (new_stmt))
5370 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5371 new_stmt);
5373 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5374 bitmap_set_bit (to_purge, dest->index);
5378 if (EDGE_COUNT (dest->succs) > 0)
5380 /* Avoid warnings emitted from folding statements that
5381 became unreachable because of inlined function parameter
5382 propagation. */
5383 e = find_taken_edge (dest, NULL_TREE);
5384 if (e)
5385 stack.quick_push (e);
5386 else
5388 edge_iterator ei;
5389 FOR_EACH_EDGE (e, ei, dest->succs)
5390 stack.safe_push (e);
5395 gimple_purge_all_dead_eh_edges (to_purge);
5398 /* Expand calls to inline functions in the body of FN. */
5400 unsigned int
5401 optimize_inline_calls (tree fn)
5403 copy_body_data id;
5404 basic_block bb;
5405 int last = n_basic_blocks_for_fn (cfun);
5406 bool inlined_p = false;
5408 /* Clear out ID. */
5409 memset (&id, 0, sizeof (id));
5411 id.src_node = id.dst_node = cgraph_node::get (fn);
5412 gcc_assert (id.dst_node->definition);
5413 id.dst_fn = fn;
5414 /* Or any functions that aren't finished yet. */
5415 if (current_function_decl)
5416 id.dst_fn = current_function_decl;
5418 id.copy_decl = copy_decl_maybe_to_var;
5419 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5420 id.transform_new_cfg = false;
5421 id.transform_return_to_modify = true;
5422 id.transform_parameter = true;
5423 id.transform_lang_insert_block = NULL;
5424 id.statements_to_fold = new hash_set<gimple *>;
5426 push_gimplify_context ();
5428 /* We make no attempts to keep dominance info up-to-date. */
5429 free_dominance_info (CDI_DOMINATORS);
5430 free_dominance_info (CDI_POST_DOMINATORS);
5432 /* Register specific gimple functions. */
5433 gimple_register_cfg_hooks ();
5435 /* Reach the trees by walking over the CFG, and note the
5436 enclosing basic-blocks in the call edges. */
5437 /* We walk the blocks going forward, because inlined function bodies
5438 will split id->current_basic_block, and the new blocks will
5439 follow it; we'll trudge through them, processing their CALL_EXPRs
5440 along the way. */
5441 auto_bitmap to_purge;
5442 FOR_EACH_BB_FN (bb, cfun)
5443 inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5445 pop_gimplify_context (NULL);
5447 if (flag_checking)
5449 struct cgraph_edge *e;
5451 id.dst_node->verify ();
5453 /* Double check that we inlined everything we are supposed to inline. */
5454 for (e = id.dst_node->callees; e; e = e->next_callee)
5455 gcc_assert (e->inline_failed);
5458 /* If we didn't inline into the function there is nothing to do. */
5459 if (!inlined_p)
5461 delete id.statements_to_fold;
5462 return 0;
5465 /* Fold queued statements. */
5466 update_max_bb_count ();
5467 fold_marked_statements (last, id.statements_to_fold);
5468 delete id.statements_to_fold;
5470 /* Finally purge EH and abnormal edges from the call stmts we inlined.
5471 We need to do this after fold_marked_statements since that may walk
5472 the SSA use-def chain. */
5473 unsigned i;
5474 bitmap_iterator bi;
5475 EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5477 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5478 if (bb)
5480 gimple_purge_dead_eh_edges (bb);
5481 gimple_purge_dead_abnormal_call_edges (bb);
5485 gcc_assert (!id.debug_stmts.exists ());
5487 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5488 number_blocks (fn);
5490 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5491 id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();
5493 if (flag_checking)
5494 id.dst_node->verify ();
5496 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5497 not possible yet - the IPA passes might make various functions not
5498 throw, and they don't care to proactively update local EH info. This is
5499 done later in the fixup_cfg pass, which also executes the verification. */
5500 return (TODO_update_ssa
5501 | TODO_cleanup_cfg
5502 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5503 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5504 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5505 ? TODO_rebuild_frequencies : 0));
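/* Illustrative sketch (a hypothetical caller, not part of this file): run
   optimize_inline_calls with CFUN switched to FN's function and hand the
   returned TODO flags back to the pass manager, roughly:

     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
     unsigned int todo = optimize_inline_calls (fndecl);
     pop_cfun ();

   where TODO may request TODO_update_ssa, TODO_cleanup_cfg and friends; the
   IPA inline transform stage is the usual caller within GCC itself.  */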
5508 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5510 tree
5511 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5513 enum tree_code code = TREE_CODE (*tp);
5514 enum tree_code_class cl = TREE_CODE_CLASS (code);
5516 /* We make copies of most nodes. */
5517 if (IS_EXPR_CODE_CLASS (cl)
5518 || code == TREE_LIST
5519 || code == TREE_VEC
5520 || code == TYPE_DECL
5521 || code == OMP_CLAUSE)
5523 /* Because the chain gets clobbered when we make a copy, we save it
5524 here. */
5525 tree chain = NULL_TREE, new_tree;
5527 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5528 chain = TREE_CHAIN (*tp);
5530 /* Copy the node. */
5531 new_tree = copy_node (*tp);
5533 *tp = new_tree;
5535 /* Now, restore the chain, if appropriate. That will cause
5536 walk_tree to walk into the chain as well. */
5537 if (code == PARM_DECL
5538 || code == TREE_LIST
5539 || code == OMP_CLAUSE)
5540 TREE_CHAIN (*tp) = chain;
5542 /* For now, we don't update BLOCKs when we make copies. So, we
5543 have to nullify all BIND_EXPRs. */
5544 if (TREE_CODE (*tp) == BIND_EXPR)
5545 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5547 else if (code == CONSTRUCTOR)
5549 /* CONSTRUCTOR nodes need special handling because
5550 we need to duplicate the vector of elements. */
5551 tree new_tree;
5553 new_tree = copy_node (*tp);
5554 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5555 *tp = new_tree;
5557 else if (code == STATEMENT_LIST)
5558 /* We used to just abort on STATEMENT_LIST, but we can run into them
5559 with statement-expressions (c++/40975). */
5560 copy_statement_list (tp);
5561 else if (TREE_CODE_CLASS (code) == tcc_type)
5562 *walk_subtrees = 0;
5563 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5564 *walk_subtrees = 0;
5565 else if (TREE_CODE_CLASS (code) == tcc_constant)
5566 *walk_subtrees = 0;
5567 return NULL_TREE;
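/* Minimal usage sketch (hypothetical caller): deep-copy an expression tree
   while sharing types, decls and constants, which is what the callback above
   arranges by clearing *WALK_SUBTREES for those classes:

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);
*/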
5570 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5571 information indicating to what new SAVE_EXPR this one should be mapped,
5572 use that one. Otherwise, create a new node and enter it in ST. FN is
5573 the function into which the copy will be placed. */
5575 static void
5576 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5578 tree *n;
5579 tree t;
5581 /* See if we already encountered this SAVE_EXPR. */
5582 n = st->get (*tp);
5584 /* If we didn't already remap this SAVE_EXPR, do so now. */
5585 if (!n)
5587 t = copy_node (*tp);
5589 /* Remember this SAVE_EXPR. */
5590 st->put (*tp, t);
5591 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5592 st->put (t, t);
5594 else
5596 /* We've already walked into this SAVE_EXPR; don't do it again. */
5597 *walk_subtrees = 0;
5598 t = *n;
5601 /* Replace this SAVE_EXPR with the copy. */
5602 *tp = t;
5605 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5606 label, copies the declaration and enters it in the decl map of the
5607 'copy_body_data' carried in WI->info. */
5609 static tree
5610 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5611 bool *handled_ops_p ATTRIBUTE_UNUSED,
5612 struct walk_stmt_info *wi)
5614 copy_body_data *id = (copy_body_data *) wi->info;
5615 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5617 if (stmt)
5619 tree decl = gimple_label_label (stmt);
5621 /* Copy the decl and remember the copy. */
5622 insert_decl_map (id, decl, id->copy_decl (decl, id));
5625 return NULL_TREE;
5628 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5629 struct walk_stmt_info *wi);
5631 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5632 Using the decl map of the 'copy_body_data' carried in DATA, remaps all
5633 local declarations to appropriate replacements in gimple
5634 operands. */
5636 static tree
5637 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5639 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5640 copy_body_data *id = (copy_body_data *) wi->info;
5641 hash_map<tree, tree> *st = id->decl_map;
5642 tree *n;
5643 tree expr = *tp;
5645 /* For recursive invocations this is no longer the LHS itself. */
5646 bool is_lhs = wi->is_lhs;
5647 wi->is_lhs = false;
5649 if (TREE_CODE (expr) == SSA_NAME)
5651 *tp = remap_ssa_name (*tp, id);
5652 *walk_subtrees = 0;
5653 if (is_lhs)
5654 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5656 /* Only a local declaration (variable or label). */
5657 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5658 || TREE_CODE (expr) == LABEL_DECL)
5660 /* Lookup the declaration. */
5661 n = st->get (expr);
5663 /* If it's there, remap it. */
5664 if (n)
5665 *tp = *n;
5666 *walk_subtrees = 0;
5668 else if (TREE_CODE (expr) == STATEMENT_LIST
5669 || TREE_CODE (expr) == BIND_EXPR
5670 || TREE_CODE (expr) == SAVE_EXPR)
5671 gcc_unreachable ();
5672 else if (TREE_CODE (expr) == TARGET_EXPR)
5674 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5675 It's OK for this to happen if it was part of a subtree that
5676 isn't immediately expanded, such as operand 2 of another
5677 TARGET_EXPR. */
5678 if (!TREE_OPERAND (expr, 1))
5680 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5681 TREE_OPERAND (expr, 3) = NULL_TREE;
5684 else if (TREE_CODE (expr) == OMP_CLAUSE)
5686 /* Before the omplower pass completes, some OMP clauses can contain
5687 sequences that are neither copied by gimple_seq_copy nor walked by
5688 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5689 in those situations, we have to copy and process them explicitly. */
5691 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5693 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5694 seq = duplicate_remap_omp_clause_seq (seq, wi);
5695 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5697 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5699 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5700 seq = duplicate_remap_omp_clause_seq (seq, wi);
5701 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5703 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5705 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5706 seq = duplicate_remap_omp_clause_seq (seq, wi);
5707 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5708 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5709 seq = duplicate_remap_omp_clause_seq (seq, wi);
5710 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5714 /* Keep iterating. */
5715 return NULL_TREE;
5719 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5720 Using the decl map of the 'copy_body_data' carried in WI->info, remaps all
5721 local declarations to appropriate replacements in gimple
5722 statements. */
5724 static tree
5725 replace_locals_stmt (gimple_stmt_iterator *gsip,
5726 bool *handled_ops_p ATTRIBUTE_UNUSED,
5727 struct walk_stmt_info *wi)
5729 copy_body_data *id = (copy_body_data *) wi->info;
5730 gimple *gs = gsi_stmt (*gsip);
5732 if (gbind *stmt = dyn_cast <gbind *> (gs))
5734 tree block = gimple_bind_block (stmt);
5736 if (block)
5738 remap_block (&block, id);
5739 gimple_bind_set_block (stmt, block);
5742 /* This will remap a lot of the same decls again, but this should be
5743 harmless. */
5744 if (gimple_bind_vars (stmt))
5746 tree old_var, decls = gimple_bind_vars (stmt);
5748 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5749 if (!can_be_nonlocal (old_var, id)
5750 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5751 remap_decl (old_var, id);
5753 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5754 id->prevent_decl_creation_for_types = true;
5755 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5756 id->prevent_decl_creation_for_types = false;
5760 /* Keep iterating. */
5761 return NULL_TREE;
5764 /* Create a copy of SEQ and remap all decls in it. */
5766 static gimple_seq
5767 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5769 if (!seq)
5770 return NULL;
5772 /* If there are any labels in OMP sequences, they can only be referred to from
5773 within the sequence itself, so we can do both the label marking and the remapping here. */
5774 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5775 gimple_seq copy = gimple_seq_copy (seq);
5776 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5777 return copy;
5780 /* Copies everything in SEQ and replaces variables and labels local to
5781 current_function_decl. */
5783 gimple_seq
5784 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5786 copy_body_data id;
5787 struct walk_stmt_info wi;
5788 gimple_seq copy;
5790 /* There's nothing to do for an empty sequence. */
5791 if (seq == NULL)
5792 return seq;
5794 /* Set up ID. */
5795 memset (&id, 0, sizeof (id));
5796 id.src_fn = current_function_decl;
5797 id.dst_fn = current_function_decl;
5798 id.src_cfun = cfun;
5799 id.decl_map = new hash_map<tree, tree>;
5800 id.debug_map = NULL;
5802 id.copy_decl = copy_decl_no_change;
5803 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5804 id.transform_new_cfg = false;
5805 id.transform_return_to_modify = false;
5806 id.transform_parameter = false;
5807 id.transform_lang_insert_block = NULL;
5809 /* Walk the tree once to find local labels. */
5810 memset (&wi, 0, sizeof (wi));
5811 hash_set<tree> visited;
5812 wi.info = &id;
5813 wi.pset = &visited;
5814 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5816 copy = gimple_seq_copy (seq);
5818 /* Walk the copy, remapping decls. */
5819 memset (&wi, 0, sizeof (wi));
5820 wi.info = &id;
5821 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5823 /* Clean up. */
5824 delete id.decl_map;
5825 if (id.debug_map)
5826 delete id.debug_map;
5827 if (id.dependence_map)
5829 delete id.dependence_map;
5830 id.dependence_map = NULL;
5833 return copy;
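/* Minimal usage sketch (hypothetical caller): duplicate a statement sequence
   so that the copy gets fresh local VAR_DECLs and LABEL_DECLs while non-local
   declarations stay shared between the original and the copy:

     gimple_seq body = gimple_bind_body (bind_stmt);
     gimple_seq dup  = copy_gimple_seq_and_replace_locals (body);
*/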
5837 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5839 static tree
5840 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5842 if (*tp == data)
5843 return (tree) data;
5844 else
5845 return NULL;
5848 DEBUG_FUNCTION bool
5849 debug_find_tree (tree top, tree search)
5851 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
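/* Example from a debugging session (hypothetical values):

     (gdb) call debug_find_tree (current_function_decl, suspect_tree)
     $1 = true

   i.e. walk TOP without duplicates and report whether SEARCH occurs
   anywhere beneath it.  */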
5855 /* Declare the variables created by the inliner. Add all the variables in
5856 VARS to BLOCK. */
5858 static void
5859 declare_inline_vars (tree block, tree vars)
5861 tree t;
5862 for (t = vars; t; t = DECL_CHAIN (t))
5864 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5865 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5866 add_local_decl (cfun, t);
5869 if (block)
5870 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5873 /* Finish up the copy COPY of DECL. The DECL originally lived in ID->src_fn,
5874 but the copy will belong to ID->dst_fn. Used by the copy_decl_* helpers
5875 below. */
5877 tree
5878 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5880 /* Don't generate debug information for the copy if we wouldn't have
5881 generated it for the original either. */
5882 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5883 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5885 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5886 declaration inspired this copy. */
5887 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5889 /* The new variable/label has no RTL, yet. */
5890 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5891 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5892 SET_DECL_RTL (copy, 0);
5893 /* For vector typed decls make sure to update DECL_MODE according
5894 to the new function context. */
5895 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5896 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5898 /* These args would always appear unused, if not for this. */
5899 TREE_USED (copy) = 1;
5901 /* Set the context for the new declaration. */
5902 if (!DECL_CONTEXT (decl))
5903 /* Globals stay global. */
5905 else if (DECL_CONTEXT (decl) != id->src_fn)
5906 /* Things that weren't in the scope of the function we're inlining
5907 from aren't in the scope we're inlining to, either. */
5909 else if (TREE_STATIC (decl))
5910 /* Function-scoped static variables should stay in the original
5911 function. */
5913 else
5915 /* Ordinary automatic local variables are now in the scope of the
5916 new function. */
5917 DECL_CONTEXT (copy) = id->dst_fn;
5918 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5920 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5921 DECL_ATTRIBUTES (copy)
5922 = tree_cons (get_identifier ("omp simt private"), NULL,
5923 DECL_ATTRIBUTES (copy));
5924 id->dst_simt_vars->safe_push (copy);
5928 return copy;
5931 /* Create a new VAR_DECL that is identical in all respects to DECL, except that
5932 DECL may be a PARM_DECL or RESULT_DECL while the copy is always a VAR_DECL. The original
5933 DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
5935 tree
5936 copy_decl_to_var (tree decl, copy_body_data *id)
5938 tree copy, type;
5940 gcc_assert (TREE_CODE (decl) == PARM_DECL
5941 || TREE_CODE (decl) == RESULT_DECL);
5943 type = TREE_TYPE (decl);
5945 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5946 VAR_DECL, DECL_NAME (decl), type);
5947 if (DECL_PT_UID_SET_P (decl))
5948 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5949 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5950 TREE_READONLY (copy) = TREE_READONLY (decl);
5951 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5952 DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
5953 DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
5955 return copy_decl_for_dup_finish (id, decl, copy);
5958 /* Like copy_decl_to_var, but create a return slot object instead of a
5959 pointer variable for return by invisible reference. */
5961 static tree
5962 copy_result_decl_to_var (tree decl, copy_body_data *id)
5964 tree copy, type;
5966 gcc_assert (TREE_CODE (decl) == PARM_DECL
5967 || TREE_CODE (decl) == RESULT_DECL);
5969 type = TREE_TYPE (decl);
5970 if (DECL_BY_REFERENCE (decl))
5971 type = TREE_TYPE (type);
5973 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5974 VAR_DECL, DECL_NAME (decl), type);
5975 if (DECL_PT_UID_SET_P (decl))
5976 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5977 TREE_READONLY (copy) = TREE_READONLY (decl);
5978 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5979 if (!DECL_BY_REFERENCE (decl))
5981 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5982 DECL_NOT_GIMPLE_REG_P (copy)
5983 = (DECL_NOT_GIMPLE_REG_P (decl)
5984 /* RESULT_DECLs are treated specially by needs_to_live_in_memory;
5985 mirror that to the created VAR_DECL. */
5986 || (TREE_CODE (decl) == RESULT_DECL
5987 && aggregate_value_p (decl, id->src_fn)));
5990 return copy_decl_for_dup_finish (id, decl, copy);
5993 tree
5994 copy_decl_no_change (tree decl, copy_body_data *id)
5996 tree copy;
5998 copy = copy_node (decl);
6000 /* The COPY is not abstract; it will be generated in DST_FN. */
6001 DECL_ABSTRACT_P (copy) = false;
6002 lang_hooks.dup_lang_specific_decl (copy);
6004 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
6005 been taken; it's for internal bookkeeping in expand_goto_internal. */
6006 if (TREE_CODE (copy) == LABEL_DECL)
6008 TREE_ADDRESSABLE (copy) = 0;
6009 LABEL_DECL_UID (copy) = -1;
6012 return copy_decl_for_dup_finish (id, decl, copy);
6015 static tree
6016 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
6018 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
6019 return copy_decl_to_var (decl, id);
6020 else
6021 return copy_decl_no_change (decl, id);
6024 /* Return a copy of the function's argument tree without any modifications. */
6026 static tree
6027 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
6029 tree arg, *parg;
6030 tree new_parm = NULL;
6032 parg = &new_parm;
6033 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
6035 tree new_tree = remap_decl (arg, id);
6036 if (TREE_CODE (new_tree) != PARM_DECL)
6037 new_tree = id->copy_decl (arg, id);
6038 lang_hooks.dup_lang_specific_decl (new_tree);
6039 *parg = new_tree;
6040 parg = &DECL_CHAIN (new_tree);
6042 return new_parm;
6045 /* Return a copy of the function's static chain. */
6046 static tree
6047 copy_static_chain (tree static_chain, copy_body_data * id)
6049 tree *chain_copy, *pvar;
6051 chain_copy = &static_chain;
6052 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
6054 tree new_tree = remap_decl (*pvar, id);
6055 lang_hooks.dup_lang_specific_decl (new_tree);
6056 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
6057 *pvar = new_tree;
6059 return static_chain;
6062 /* Return true if the function is allowed to be versioned.
6063 This is a guard for the versioning functionality. */
6065 bool
6066 tree_versionable_function_p (tree fndecl)
6068 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
6069 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
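/* For example (illustrative, not taken from this file), a function declared as

     __attribute__ ((noclone)) int f (int x) { return x; }

   is rejected here, as is any function for which copy_forbidden records a
   reason (e.g. one that receives a non-local goto).  */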
6072 /* Update clone info after duplication. */
6074 static void
6075 update_clone_info (copy_body_data * id)
6077 vec<ipa_param_performed_split, va_gc> *cur_performed_splits
6078 = id->dst_node->clone.performed_splits;
6079 if (cur_performed_splits)
6081 unsigned len = cur_performed_splits->length ();
6082 for (unsigned i = 0; i < len; i++)
6084 ipa_param_performed_split *ps = &(*cur_performed_splits)[i];
6085 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6089 struct cgraph_node *node;
6090 if (!id->dst_node->clones)
6091 return;
6092 for (node = id->dst_node->clones; node != id->dst_node;)
6094 /* First update replace maps to match the new body. */
6095 if (node->clone.tree_map)
6097 unsigned int i;
6098 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
6100 struct ipa_replace_map *replace_info;
6101 replace_info = (*node->clone.tree_map)[i];
6102 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6105 if (node->clone.performed_splits)
6107 unsigned len = vec_safe_length (node->clone.performed_splits);
6108 for (unsigned i = 0; i < len; i++)
6110 ipa_param_performed_split *ps
6111 = &(*node->clone.performed_splits)[i];
6112 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6115 if (unsigned len = vec_safe_length (cur_performed_splits))
6117 /* We do not want to add the current performed splits when we are saving
6118 a copy of the function body for later use during inlining; that would just
6119 duplicate all entries. So let's check whether anything
6120 referring to the first dummy_decl is already present. */
6121 unsigned dst_len = vec_safe_length (node->clone.performed_splits);
6122 ipa_param_performed_split *first = &(*cur_performed_splits)[0];
6123 for (unsigned i = 0; i < dst_len; i++)
6124 if ((*node->clone.performed_splits)[i].dummy_decl
6125 == first->dummy_decl)
6127 len = 0;
6128 break;
6131 for (unsigned i = 0; i < len; i++)
6132 vec_safe_push (node->clone.performed_splits,
6133 (*cur_performed_splits)[i]);
6134 if (flag_checking)
6136 for (unsigned i = 0; i < dst_len; i++)
6138 ipa_param_performed_split *ps1
6139 = &(*node->clone.performed_splits)[i];
6140 for (unsigned j = i + 1; j < dst_len; j++)
6142 ipa_param_performed_split *ps2
6143 = &(*node->clone.performed_splits)[j];
6144 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
6145 || ps1->unit_offset != ps2->unit_offset);
6151 if (node->clones)
6152 node = node->clones;
6153 else if (node->next_sibling_clone)
6154 node = node->next_sibling_clone;
6155 else
6157 while (node != id->dst_node && !node->next_sibling_clone)
6158 node = node->clone_of;
6159 if (node != id->dst_node)
6160 node = node->next_sibling_clone;
6165 /* Create a copy of a function's tree.
6166 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6167 of the original function and the new copied function
6168 respectively. In case we want to replace a DECL
6169 tree with another tree while duplicating the function's
6170 body, TREE_MAP represents the mapping between these
6171 trees. If UPDATE_CLONES is set, the call_stmt fields
6172 of edges of clones of the function will be updated.
6174 If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype (i.e. the
6175 function parameters and return value) should be modified.
6176 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
6177 If non-NULL, NEW_ENTRY determines the new entry BB of the clone. */
6179 void
6180 tree_function_versioning (tree old_decl, tree new_decl,
6181 vec<ipa_replace_map *, va_gc> *tree_map,
6182 ipa_param_adjustments *param_adjustments,
6183 bool update_clones, bitmap blocks_to_copy,
6184 basic_block new_entry)
6186 struct cgraph_node *old_version_node;
6187 struct cgraph_node *new_version_node;
6188 copy_body_data id;
6189 tree p;
6190 unsigned i;
6191 struct ipa_replace_map *replace_info;
6192 basic_block old_entry_block, bb;
6193 auto_vec<gimple *, 10> init_stmts;
6194 tree vars = NULL_TREE;
6196 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6197 && TREE_CODE (new_decl) == FUNCTION_DECL);
6198 DECL_POSSIBLY_INLINED (old_decl) = 1;
6200 old_version_node = cgraph_node::get (old_decl);
6201 gcc_checking_assert (old_version_node);
6202 new_version_node = cgraph_node::get (new_decl);
6203 gcc_checking_assert (new_version_node);
6205 /* Copy over debug args. */
6206 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6208 vec<tree, va_gc> **new_debug_args, **old_debug_args;
6209 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6210 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6211 old_debug_args = decl_debug_args_lookup (old_decl);
6212 if (old_debug_args)
6214 new_debug_args = decl_debug_args_insert (new_decl);
6215 *new_debug_args = vec_safe_copy (*old_debug_args);
6219 /* Output the inlining info for this abstract function, since it has been
6220 inlined. If we don't do this now, we can lose the information about the
6221 variables in the function when the blocks get blown away as soon as we
6222 remove the cgraph node. */
6223 (*debug_hooks->outlining_inline_function) (old_decl);
6225 DECL_ARTIFICIAL (new_decl) = 1;
6226 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6227 if (DECL_ORIGIN (old_decl) == old_decl)
6228 old_version_node->used_as_abstract_origin = true;
6229 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6231 /* Prepare the data structures for the tree copy. */
6232 memset (&id, 0, sizeof (id));
6234 /* Record statements that will need folding after the copy. */
6235 id.statements_to_fold = new hash_set<gimple *>;
6237 id.decl_map = new hash_map<tree, tree>;
6238 id.debug_map = NULL;
6239 id.src_fn = old_decl;
6240 id.dst_fn = new_decl;
6241 id.src_node = old_version_node;
6242 id.dst_node = new_version_node;
6243 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6244 id.blocks_to_copy = blocks_to_copy;
6246 id.copy_decl = copy_decl_no_change;
6247 id.transform_call_graph_edges
6248 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6249 id.transform_new_cfg = true;
6250 id.transform_return_to_modify = false;
6251 id.transform_parameter = false;
6252 id.transform_lang_insert_block = NULL;
6254 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
6255 (DECL_STRUCT_FUNCTION (old_decl));
6256 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6257 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6258 initialize_cfun (new_decl, old_decl,
6259 new_entry ? new_entry->count : old_entry_block->count);
6260 new_version_node->calls_declare_variant_alt
6261 = old_version_node->calls_declare_variant_alt;
6262 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6263 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6264 = id.src_cfun->gimple_df->ipa_pta;
6266 /* Copy the function's static chain. */
6267 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6268 if (p)
6269 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6270 = copy_static_chain (p, &id);
6272 auto_vec<int, 16> new_param_indices;
6273 ipa_param_adjustments *old_param_adjustments
6274 = old_version_node->clone.param_adjustments;
6275 if (old_param_adjustments)
6276 old_param_adjustments->get_updated_indices (&new_param_indices);
6278 /* If there's a tree_map, prepare for substitution. */
6279 if (tree_map)
6280 for (i = 0; i < tree_map->length (); i++)
6282 gimple *init;
6283 replace_info = (*tree_map)[i];
6285 int p = replace_info->parm_num;
6286 if (old_param_adjustments)
6287 p = new_param_indices[p];
6289 tree parm;
6290 for (parm = DECL_ARGUMENTS (old_decl); p;
6291 parm = DECL_CHAIN (parm))
6292 p--;
6293 gcc_assert (parm);
6294 init = setup_one_parameter (&id, parm, replace_info->new_tree,
6295 id.src_fn, NULL, &vars);
6296 if (init)
6297 init_stmts.safe_push (init);
6300 ipa_param_body_adjustments *param_body_adjs = NULL;
6301 if (param_adjustments)
6303 param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6304 new_decl, old_decl,
6305 &id, &vars, tree_map);
6306 id.param_body_adjs = param_body_adjs;
6307 DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6309 else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6310 DECL_ARGUMENTS (new_decl)
6311 = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6313 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6314 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6316 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6318 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6319 /* Add local vars. */
6320 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6322 if (DECL_RESULT (old_decl) == NULL_TREE)
6324 else if (param_adjustments && param_adjustments->m_skip_return
6325 && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6327 tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6328 &id);
6329 declare_inline_vars (NULL, resdecl_repl);
6330 insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6332 DECL_RESULT (new_decl)
6333 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6334 RESULT_DECL, NULL_TREE, void_type_node);
6335 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6336 DECL_IS_MALLOC (new_decl) = false;
6337 cfun->returns_struct = 0;
6338 cfun->returns_pcc_struct = 0;
6340 else
6342 tree old_name;
6343 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6344 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6345 if (gimple_in_ssa_p (id.src_cfun)
6346 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6347 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6349 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6350 insert_decl_map (&id, old_name, new_name);
6351 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6352 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6356 /* Set up the destination function's loop tree. */
6357 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6359 cfun->curr_properties &= ~PROP_loops;
6360 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6361 cfun->curr_properties |= PROP_loops;
6364 /* Copy the function's body. */
6365 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6366 new_entry);
6368 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6369 number_blocks (new_decl);
6371 /* We want to create the BB unconditionally, so that the addition of
6372 debug stmts doesn't affect BB count, which may in the end cause
6373 codegen differences. */
6374 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6375 while (init_stmts.length ())
6376 insert_init_stmt (&id, bb, init_stmts.pop ());
6377 update_clone_info (&id);
6379 /* Remap the nonlocal_goto_save_area, if any. */
6380 if (cfun->nonlocal_goto_save_area)
6382 struct walk_stmt_info wi;
6384 memset (&wi, 0, sizeof (wi));
6385 wi.info = &id;
6386 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6389 /* Clean up. */
6390 delete id.decl_map;
6391 if (id.debug_map)
6392 delete id.debug_map;
6393 free_dominance_info (CDI_DOMINATORS);
6394 free_dominance_info (CDI_POST_DOMINATORS);
6396 update_max_bb_count ();
6397 fold_marked_statements (0, id.statements_to_fold);
6398 delete id.statements_to_fold;
6399 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6400 if (id.dst_node->definition)
6401 cgraph_edge::rebuild_references ();
6402 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6404 calculate_dominance_info (CDI_DOMINATORS);
6405 fix_loop_structure (NULL);
6407 update_ssa (TODO_update_ssa);
6409 /* After partial cloning we need to rescale frequencies, so that they are
6410 within the proper range in the cloned function. */
6411 if (new_entry)
6413 struct cgraph_edge *e;
6414 rebuild_frequencies ();
6416 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6417 for (e = new_version_node->callees; e; e = e->next_callee)
6419 basic_block bb = gimple_bb (e->call_stmt);
6420 e->count = bb->count;
6422 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6424 basic_block bb = gimple_bb (e->call_stmt);
6425 e->count = bb->count;
6429 if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6431 vec<tree, va_gc> **debug_args = NULL;
6432 unsigned int len = 0;
6433 unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6435 for (i = 0; i < reset_len; i++)
6437 tree parm = param_body_adjs->m_reset_debug_decls[i];
6438 gcc_assert (is_gimple_reg (parm));
6439 tree ddecl;
6441 if (debug_args == NULL)
6443 debug_args = decl_debug_args_insert (new_decl);
6444 len = vec_safe_length (*debug_args);
6446 ddecl = make_node (DEBUG_EXPR_DECL);
6447 DECL_ARTIFICIAL (ddecl) = 1;
6448 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6449 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6450 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6451 vec_safe_push (*debug_args, ddecl);
6453 if (debug_args != NULL)
6455 /* On the callee side, add
6456 DEBUG D#Y s=> parm
6457 DEBUG var => D#Y
6458 stmts to the first bb where var is a VAR_DECL created for the
6459 optimized-away parameter in the DECL_INITIAL block. This hints
6460 in the debug info that var (whose DECL_ORIGIN is the parm
6461 PARM_DECL) is optimized away, but could be looked up at the
6462 call site as the value of D#X there. */
6463 tree vexpr;
6464 gimple_stmt_iterator cgsi
6465 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6466 gimple *def_temp;
6467 tree var = vars;
6468 i = vec_safe_length (*debug_args);
6471 i -= 2;
6472 while (var != NULL_TREE
6473 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6474 var = TREE_CHAIN (var);
6475 if (var == NULL_TREE)
6476 break;
6477 vexpr = make_node (DEBUG_EXPR_DECL);
6478 tree parm = (**debug_args)[i];
6479 DECL_ARTIFICIAL (vexpr) = 1;
6480 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6481 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6482 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6483 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6484 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6485 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6487 while (i > len);
6490 delete param_body_adjs;
6491 free_dominance_info (CDI_DOMINATORS);
6492 free_dominance_info (CDI_POST_DOMINATORS);
6494 gcc_assert (!id.debug_stmts.exists ());
6495 pop_cfun ();
6496 return;
6499 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6500 the callee and return the inlined body on success. */
6502 tree
6503 maybe_inline_call_in_expr (tree exp)
6505 tree fn = get_callee_fndecl (exp);
6507 /* We can only try to inline "const" functions. */
6508 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6510 call_expr_arg_iterator iter;
6511 copy_body_data id;
6512 tree param, arg, t;
6513 hash_map<tree, tree> decl_map;
6515 /* Remap the parameters. */
6516 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6517 param;
6518 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6519 decl_map.put (param, arg);
6521 memset (&id, 0, sizeof (id));
6522 id.src_fn = fn;
6523 id.dst_fn = current_function_decl;
6524 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6525 id.decl_map = &decl_map;
6527 id.copy_decl = copy_decl_no_change;
6528 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6529 id.transform_new_cfg = false;
6530 id.transform_return_to_modify = true;
6531 id.transform_parameter = true;
6532 id.transform_lang_insert_block = NULL;
6534 /* Make sure not to unshare trees behind the front-end's back
6535 since front-end specific mechanisms may rely on sharing. */
6536 id.regimplify = false;
6537 id.do_not_unshare = true;
6539 /* We're not inside any EH region. */
6540 id.eh_lp_nr = 0;
6542 t = copy_tree_body (&id);
6544 /* We can only return something suitable for use in a GENERIC
6545 expression tree. */
6546 if (TREE_CODE (t) == MODIFY_EXPR)
6547 return TREE_OPERAND (t, 1);
6550 return NULL_TREE;
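/* Sketch of the intended effect (hypothetical source): given

     __attribute__ ((const)) static int identity (int x) { return x; }

   a GENERIC CALL_EXPR 'identity (5)' whose callee body is still available can
   be replaced by the right-hand side of the MODIFY_EXPR produced for the
   copied body, i.e. effectively by the argument expression after parameter
   remapping.  */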
6553 /* Duplicate a type, fields and all. */
6555 tree
6556 build_duplicate_type (tree type)
6558 struct copy_body_data id;
6560 memset (&id, 0, sizeof (id));
6561 id.src_fn = current_function_decl;
6562 id.dst_fn = current_function_decl;
6563 id.src_cfun = cfun;
6564 id.decl_map = new hash_map<tree, tree>;
6565 id.debug_map = NULL;
6566 id.copy_decl = copy_decl_no_change;
6568 type = remap_type_1 (type, &id);
6570 delete id.decl_map;
6571 if (id.debug_map)
6572 delete id.debug_map;
6574 TYPE_CANONICAL (type) = type;
6576 return type;
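/* Usage sketch (hypothetical caller):

     tree dup = build_duplicate_type (orig_type);

   The result is a remapped copy that is made its own TYPE_CANONICAL, so
   canonical-type comparisons treat it as distinct from ORIG_TYPE.  */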
6579 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6580 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6581 evaluation. */
6583 tree
6584 copy_fn (tree fn, tree& parms, tree& result)
6586 copy_body_data id;
6587 tree param;
6588 hash_map<tree, tree> decl_map;
6590 tree *p = &parms;
6591 *p = NULL_TREE;
6593 memset (&id, 0, sizeof (id));
6594 id.src_fn = fn;
6595 id.dst_fn = current_function_decl;
6596 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6597 id.decl_map = &decl_map;
6599 id.copy_decl = copy_decl_no_change;
6600 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6601 id.transform_new_cfg = false;
6602 id.transform_return_to_modify = false;
6603 id.transform_parameter = true;
6604 id.transform_lang_insert_block = NULL;
6606 /* Make sure not to unshare trees behind the front-end's back
6607 since front-end specific mechanisms may rely on sharing. */
6608 id.regimplify = false;
6609 id.do_not_unshare = true;
6610 id.do_not_fold = true;
6612 /* We're not inside any EH region. */
6613 id.eh_lp_nr = 0;
6615 /* Remap the parameters and result and return them to the caller. */
6616 for (param = DECL_ARGUMENTS (fn);
6617 param;
6618 param = DECL_CHAIN (param))
6620 *p = remap_decl (param, &id);
6621 p = &DECL_CHAIN (*p);
6624 if (DECL_RESULT (fn))
6625 result = remap_decl (DECL_RESULT (fn), &id);
6626 else
6627 result = NULL_TREE;
6629 return copy_tree_body (&id);
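/* Minimal usage sketch (hypothetical caller, mirroring the C++ constexpr
   evaluation mentioned above):

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);

   BODY is then an unshared copy of DECL_SAVED_TREE (fndecl); PARMS chains the
   remapped PARM_DECLs and RESULT is the remapped RESULT_DECL, or NULL_TREE
   when the function has none.  */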