gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2021 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "stringpool.h"
60 #include "attribs.h"
61 #include "sreal.h"
62 #include "tree-cfgcleanup.h"
63 #include "tree-ssa-live.h"
64 #include "alloc-pool.h"
65 #include "symbol-summary.h"
66 #include "symtab-thunks.h"
67 #include "symtab-clones.h"
69 /* I'm not really happy about this, but we need to handle gimple and
70 non-gimple trees. */
72 /* Inlining, Cloning, Versioning, Parallelization
74 Inlining: a function body is duplicated, but the PARM_DECLs are
75 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
76 MODIFY_EXPRs that store to a dedicated returned-value variable.
77 The duplicated eh_region info of the copy will later be appended
78 to the info for the caller; the eh_region info in copied throwing
79 statements and RESX statements is adjusted accordingly.
81 Cloning: (only in C++) We have one body for a con/de/structor, and
82 multiple function decls, each with a unique parameter list.
83 Duplicate the body, using the given splay tree; some parameters
84 will become constants (like 0 or 1).
86 Versioning: a function body is duplicated and the result is a new
87 function, rather than being copied into blocks of an existing function
88 as with inlining. Some parameters will become constants.
90 Parallelization: a region of a function is duplicated resulting in
91 a new function. Variables may be replaced with complex expressions
92 to enable shared variable semantics.
94 All of these will simultaneously look up any callgraph edges. If
95 we're going to inline the duplicated function body, and the given
96 function has some cloned callgraph nodes (one for each place this
97 function will be inlined) those callgraph edges will be duplicated.
98 If we're cloning the body, those callgraph edges will be
99 updated to point into the new body. (Note that the original
100 callgraph node and edge list will not be altered.)
102 See the CALL_EXPR handling case in copy_tree_body_r (). */
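/* As an illustrative sketch of the inlining transformation described
   above (not code from this pass; the names are invented), a call

       int callee (int x) { return x + 1; }
       ...
       r = callee (a);

   is expanded roughly into

       int x_copy = a;
       int retval;
       retval = x_copy + 1;
       r = retval;

   where X_COPY is the VAR_DECL the PARM_DECL was remapped to, RETVAL is
   the dedicated returned-value variable, and the callee's RETURN_EXPR
   has become the MODIFY_EXPR storing into RETVAL.  */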
104 /* To Do:
106 o In order to make inlining-on-trees work, we pessimized
107 function-local static constants. In particular, they are now
108 always output, even when not addressed. Fix this by treating
109 function-local static constants just like global static
110 constants; the back-end already knows not to output them if they
111 are not needed.
113 o Provide heuristics to clamp inlining of recursive template
114 calls? */
117 /* Weights that estimate_num_insns uses to estimate the size of the
118 produced code. */
120 eni_weights eni_size_weights;
122 /* Weights that estimate_num_insns uses to estimate the time necessary
123 to execute the produced code. */
125 eni_weights eni_time_weights;
127 /* Prototypes. */
129 static tree declare_return_variable (copy_body_data *, tree, tree,
130 basic_block);
131 static void remap_block (tree *, copy_body_data *);
132 static void copy_bind_expr (tree *, int *, copy_body_data *);
133 static void declare_inline_vars (tree, tree);
134 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
135 static void prepend_lexical_block (tree current_block, tree new_block);
136 static tree copy_result_decl_to_var (tree, copy_body_data *);
137 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
138 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
139 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
141 /* Insert a tree->tree mapping for ID. Although the name suggests
142 that the trees should be variables, it is used for more than that. */
144 void
145 insert_decl_map (copy_body_data *id, tree key, tree value)
147 id->decl_map->put (key, value);
149 /* Always insert an identity map as well. If we see this same new
150 node again, we won't want to duplicate it a second time. */
151 if (key != value)
152 id->decl_map->put (value, value);
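/* An illustrative sketch of how the map above is typically used; the
   surrounding lines are hypothetical, only insert_decl_map itself is
   real:

       tree parm = ...;
       tree copy = id->copy_decl (parm, id);
       insert_decl_map (id, parm, copy);

   After this, ID->decl_map holds both PARM -> COPY and COPY -> COPY,
   so looking up the new decl a second time simply yields itself and no
   duplicate copy is created.  */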
155 /* Insert a tree->tree mapping for ID. This is only used for
156 variables. */
158 static void
159 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
161 if (!gimple_in_ssa_p (id->src_cfun))
162 return;
164 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
165 return;
167 if (!target_for_debug_bind (key))
168 return;
170 gcc_assert (TREE_CODE (key) == PARM_DECL);
171 gcc_assert (VAR_P (value));
173 if (!id->debug_map)
174 id->debug_map = new hash_map<tree, tree>;
176 id->debug_map->put (key, value);
179 /* If nonzero, we're remapping the contents of inlined debug
180 statements. If negative, an error has occurred, such as a
181 reference to a variable that isn't available in the inlined
182 context. */
183 static int processing_debug_stmt = 0;
185 /* Construct new SSA name for old NAME. ID is the inline context. */
187 static tree
188 remap_ssa_name (tree name, copy_body_data *id)
190 tree new_tree, var;
191 tree *n;
193 gcc_assert (TREE_CODE (name) == SSA_NAME);
195 n = id->decl_map->get (name);
196 if (n)
198 /* When we perform edge redirection as part of a CFG copy, IPA-SRA can
199 remove an unused LHS from a call statement. Such an LHS can however
200 still appear in debug statements, but its value is lost in this
201 function and we do not want to map it. */
202 if (id->killed_new_ssa_names
203 && id->killed_new_ssa_names->contains (*n))
205 gcc_assert (processing_debug_stmt);
206 processing_debug_stmt = -1;
207 return name;
210 return unshare_expr (*n);
213 if (processing_debug_stmt)
215 if (SSA_NAME_IS_DEFAULT_DEF (name)
216 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
217 && id->entry_bb == NULL
218 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
220 tree vexpr = make_node (DEBUG_EXPR_DECL);
221 gimple *def_temp;
222 gimple_stmt_iterator gsi;
223 tree val = SSA_NAME_VAR (name);
225 n = id->decl_map->get (val);
226 if (n != NULL)
227 val = *n;
228 if (TREE_CODE (val) != PARM_DECL
229 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
231 processing_debug_stmt = -1;
232 return name;
234 n = id->decl_map->get (val);
235 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
236 return *n;
237 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
238 DECL_ARTIFICIAL (vexpr) = 1;
239 TREE_TYPE (vexpr) = TREE_TYPE (name);
240 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
241 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
242 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
243 insert_decl_map (id, val, vexpr);
244 return vexpr;
247 processing_debug_stmt = -1;
248 return name;
251 /* Remap anonymous SSA names or SSA names of anonymous decls. */
252 var = SSA_NAME_VAR (name);
253 if (!var
254 || (!SSA_NAME_IS_DEFAULT_DEF (name)
255 && VAR_P (var)
256 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
257 && DECL_ARTIFICIAL (var)
258 && DECL_IGNORED_P (var)
259 && !DECL_NAME (var)))
261 struct ptr_info_def *pi;
262 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
263 if (!var && SSA_NAME_IDENTIFIER (name))
264 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
265 insert_decl_map (id, name, new_tree);
266 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
267 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
268 /* At least IPA points-to info can be directly transferred. */
269 if (id->src_cfun->gimple_df
270 && id->src_cfun->gimple_df->ipa_pta
271 && POINTER_TYPE_P (TREE_TYPE (name))
272 && (pi = SSA_NAME_PTR_INFO (name))
273 && !pi->pt.anything)
275 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
276 new_pi->pt = pi->pt;
278 /* So can range-info. */
279 if (!POINTER_TYPE_P (TREE_TYPE (name))
280 && SSA_NAME_RANGE_INFO (name))
281 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
282 SSA_NAME_RANGE_INFO (name));
283 return new_tree;
286 /* Do not set DEF_STMT yet as the statement is not copied yet. We do that
287 in copy_bb. */
288 new_tree = remap_decl (var, id);
290 /* We might've substituted a constant or another SSA_NAME for
291 the variable.
293 Replace the SSA name representing the RESULT_DECL by the variable during
294 inlining: this saves us from the need to introduce a PHI node in case the
295 return value is only partly initialized. */
296 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
297 && (!SSA_NAME_VAR (name)
298 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
299 || !id->transform_return_to_modify))
301 struct ptr_info_def *pi;
302 new_tree = make_ssa_name (new_tree);
303 insert_decl_map (id, name, new_tree);
304 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
305 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
306 /* At least IPA points-to info can be directly transferred. */
307 if (id->src_cfun->gimple_df
308 && id->src_cfun->gimple_df->ipa_pta
309 && POINTER_TYPE_P (TREE_TYPE (name))
310 && (pi = SSA_NAME_PTR_INFO (name))
311 && !pi->pt.anything)
313 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
314 new_pi->pt = pi->pt;
316 /* So can range-info. */
317 if (!POINTER_TYPE_P (TREE_TYPE (name))
318 && SSA_NAME_RANGE_INFO (name))
319 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
320 SSA_NAME_RANGE_INFO (name));
321 if (SSA_NAME_IS_DEFAULT_DEF (name))
323 /* By inlining a function having an uninitialized variable, we might
324 extend its lifetime (the variable might get reused). This causes an
325 ICE in the case we end up extending the lifetime of an SSA name across an
326 abnormal edge, but it also increases register pressure.
328 We simply initialize all uninitialized vars to 0, except
329 for the case where we are inlining into the very first BB. We could avoid
330 this for all BBs that are not inside strongly connected
331 regions of the CFG, but this is expensive to test. */
332 if (id->entry_bb
333 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
334 && (!SSA_NAME_VAR (name)
335 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
336 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
337 0)->dest
338 || EDGE_COUNT (id->entry_bb->preds) != 1))
340 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
341 gimple *init_stmt;
342 tree zero = build_zero_cst (TREE_TYPE (new_tree));
344 init_stmt = gimple_build_assign (new_tree, zero);
345 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
346 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
348 else
350 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
351 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
355 else
356 insert_decl_map (id, name, new_tree);
357 return new_tree;
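/* An illustrative sketch (assumed GIMPLE, not taken from a real dump):
   when an uninitialized default definition such as i_2(D) from the
   callee occurs in an abnormal PHI and we are not inlining into the
   very first BB, the code above emits an explicit initialization

       i_7 = 0;

   at the end of ID->entry_bb, so the fresh SSA name gets a real
   defining statement and never has to survive an abnormal edge as a
   default definition.  */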
360 /* Remap DECL during the copying of the BLOCK tree for the function. */
362 tree
363 remap_decl (tree decl, copy_body_data *id)
365 tree *n;
367 /* We only remap local variables in the current function. */
369 /* See if we have remapped this declaration. */
371 n = id->decl_map->get (decl);
373 if (!n && processing_debug_stmt)
375 processing_debug_stmt = -1;
376 return decl;
379 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
380 necessary DECLs have already been remapped and we do not want to duplicate
381 a decl coming from outside of the sequence we are copying. */
382 if (!n
383 && id->prevent_decl_creation_for_types
384 && id->remapping_type_depth > 0
385 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
386 return decl;
388 /* If we didn't already have an equivalent for this declaration, create one
389 now. */
390 if (!n)
392 /* Make a copy of the variable or label. */
393 tree t = id->copy_decl (decl, id);
395 /* Remember it, so that if we encounter this local entity again
396 we can reuse this copy. Do this early because remap_type may
397 need this decl for TYPE_STUB_DECL. */
398 insert_decl_map (id, decl, t);
400 if (!DECL_P (t))
401 return t;
403 /* Remap types, if necessary. */
404 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
405 if (TREE_CODE (t) == TYPE_DECL)
407 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
409 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
410 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
411 is not set on the TYPE_DECL, for example in LTO mode. */
412 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
414 tree x = build_variant_type_copy (TREE_TYPE (t));
415 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
416 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
417 DECL_ORIGINAL_TYPE (t) = x;
421 /* Remap sizes as necessary. */
422 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
423 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
425 /* If fields, do likewise for offset and qualifier. */
426 if (TREE_CODE (t) == FIELD_DECL)
428 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
429 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
430 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
433 return t;
436 if (id->do_not_unshare)
437 return *n;
438 else
439 return unshare_expr (*n);
442 static tree
443 remap_type_1 (tree type, copy_body_data *id)
445 tree new_tree, t;
447 /* We do need a copy. Build and register it now. If this is a pointer or
448 reference type, remap the designated type and make a new pointer or
449 reference type. */
450 if (TREE_CODE (type) == POINTER_TYPE)
452 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
453 TYPE_MODE (type),
454 TYPE_REF_CAN_ALIAS_ALL (type));
455 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
456 new_tree = build_type_attribute_qual_variant (new_tree,
457 TYPE_ATTRIBUTES (type),
458 TYPE_QUALS (type));
459 insert_decl_map (id, type, new_tree);
460 return new_tree;
462 else if (TREE_CODE (type) == REFERENCE_TYPE)
464 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
465 TYPE_MODE (type),
466 TYPE_REF_CAN_ALIAS_ALL (type));
467 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
468 new_tree = build_type_attribute_qual_variant (new_tree,
469 TYPE_ATTRIBUTES (type),
470 TYPE_QUALS (type));
471 insert_decl_map (id, type, new_tree);
472 return new_tree;
474 else
475 new_tree = copy_node (type);
477 insert_decl_map (id, type, new_tree);
479 /* This is a new type, not a copy of an old type. Need to reassociate
480 variants. We can handle everything except the main variant lazily. */
481 t = TYPE_MAIN_VARIANT (type);
482 if (type != t)
484 t = remap_type (t, id);
485 TYPE_MAIN_VARIANT (new_tree) = t;
486 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
487 TYPE_NEXT_VARIANT (t) = new_tree;
489 else
491 TYPE_MAIN_VARIANT (new_tree) = new_tree;
492 TYPE_NEXT_VARIANT (new_tree) = NULL;
495 if (TYPE_STUB_DECL (type))
496 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
498 /* Lazily create pointer and reference types. */
499 TYPE_POINTER_TO (new_tree) = NULL;
500 TYPE_REFERENCE_TO (new_tree) = NULL;
502 /* Copy all types that may contain references to local variables; be sure to
503 preserve sharing in between type and its main variant when possible. */
504 switch (TREE_CODE (new_tree))
506 case INTEGER_TYPE:
507 case REAL_TYPE:
508 case FIXED_POINT_TYPE:
509 case ENUMERAL_TYPE:
510 case BOOLEAN_TYPE:
511 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
513 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
514 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
516 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
517 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
519 else
521 t = TYPE_MIN_VALUE (new_tree);
522 if (t && TREE_CODE (t) != INTEGER_CST)
523 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
525 t = TYPE_MAX_VALUE (new_tree);
526 if (t && TREE_CODE (t) != INTEGER_CST)
527 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
529 return new_tree;
531 case FUNCTION_TYPE:
532 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
533 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
534 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
535 else
536 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
537 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
538 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
539 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
540 else
541 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
542 return new_tree;
544 case ARRAY_TYPE:
545 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
546 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
547 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
548 else
549 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
551 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
553 gcc_checking_assert (TYPE_DOMAIN (type)
554 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
555 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
557 else
559 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
560 /* For array bounds where we have decided not to copy over the bounds
561 variable, which isn't used in the OpenMP/OpenACC region, change them to
562 an uninitialized VAR_DECL temporary. */
563 if (id->adjust_array_error_bounds
564 && TYPE_DOMAIN (new_tree)
565 && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
566 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
568 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
569 DECL_ATTRIBUTES (v)
570 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
571 DECL_ATTRIBUTES (v));
572 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
575 break;
577 case RECORD_TYPE:
578 case UNION_TYPE:
579 case QUAL_UNION_TYPE:
580 if (TYPE_MAIN_VARIANT (type) != type
581 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
582 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
583 else
585 tree f, nf = NULL;
587 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
589 t = remap_decl (f, id);
590 DECL_CONTEXT (t) = new_tree;
591 DECL_CHAIN (t) = nf;
592 nf = t;
594 TYPE_FIELDS (new_tree) = nreverse (nf);
596 break;
598 case OFFSET_TYPE:
599 default:
600 /* Shouldn't have been thought variable sized. */
601 gcc_unreachable ();
604 /* All variants of the type share the same size, so use the already remapped data. */
605 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
607 tree s = TYPE_SIZE (type);
608 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
609 tree su = TYPE_SIZE_UNIT (type);
610 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
611 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
612 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
613 || s == mvs);
614 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
615 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
616 || su == mvsu);
617 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
618 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
620 else
622 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
623 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
626 return new_tree;
629 /* Helper function for remap_type_2, called through walk_tree. */
631 static tree
632 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
634 copy_body_data *id = (copy_body_data *) data;
636 if (TYPE_P (*tp))
637 *walk_subtrees = 0;
639 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
640 return *tp;
642 return NULL_TREE;
645 /* Return true if TYPE needs to be remapped because remap_decl on any
646 needed embedded decl returns something other than that decl. */
648 static bool
649 remap_type_2 (tree type, copy_body_data *id)
651 tree t;
653 #define RETURN_TRUE_IF_VAR(T) \
654 do \
656 tree _t = (T); \
657 if (_t) \
659 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
660 return true; \
661 if (!TYPE_SIZES_GIMPLIFIED (type) \
662 && walk_tree (&_t, remap_type_3, id, NULL)) \
663 return true; \
666 while (0)
668 switch (TREE_CODE (type))
670 case POINTER_TYPE:
671 case REFERENCE_TYPE:
672 case FUNCTION_TYPE:
673 case METHOD_TYPE:
674 return remap_type_2 (TREE_TYPE (type), id);
676 case INTEGER_TYPE:
677 case REAL_TYPE:
678 case FIXED_POINT_TYPE:
679 case ENUMERAL_TYPE:
680 case BOOLEAN_TYPE:
681 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
682 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
683 return false;
685 case ARRAY_TYPE:
686 if (remap_type_2 (TREE_TYPE (type), id)
687 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
688 return true;
689 break;
691 case RECORD_TYPE:
692 case UNION_TYPE:
693 case QUAL_UNION_TYPE:
694 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
695 if (TREE_CODE (t) == FIELD_DECL)
697 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
698 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
699 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
700 if (TREE_CODE (type) == QUAL_UNION_TYPE)
701 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
703 break;
705 default:
706 return false;
709 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
710 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
711 return false;
712 #undef RETURN_TRUE_IF_VAR
715 tree
716 remap_type (tree type, copy_body_data *id)
718 tree *node;
719 tree tmp;
721 if (type == NULL)
722 return type;
724 /* See if we have remapped this type. */
725 node = id->decl_map->get (type);
726 if (node)
727 return *node;
729 /* The type only needs remapping if it's variably modified. */
730 if (! variably_modified_type_p (type, id->src_fn)
731 /* Don't remap if the copy_decl method doesn't always return a new
732 decl and, for all embedded decls, returns the passed-in decl. */
733 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
735 insert_decl_map (id, type, type);
736 return type;
739 id->remapping_type_depth++;
740 tmp = remap_type_1 (type, id);
741 id->remapping_type_depth--;
743 return tmp;
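/* An illustrative example (hypothetical source code): only variably
   modified types need remapping. Inlining

       void callee (int n) { char buf[n]; ... }

   must build a new ARRAY_TYPE whose bounds refer to the caller's copy
   of N, whereas a fixed-size "char buf[16]" is not variably modified,
   so remap_type only records an identity mapping for it and returns
   the type unchanged.  */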
746 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
748 static bool
749 can_be_nonlocal (tree decl, copy_body_data *id)
751 /* We cannot duplicate function decls. */
752 if (TREE_CODE (decl) == FUNCTION_DECL)
753 return true;
755 /* Local static vars must be non-local or we get multiple declaration
756 problems. */
757 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
758 return true;
760 return false;
763 static tree
764 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
765 copy_body_data *id)
767 tree old_var;
768 tree new_decls = NULL_TREE;
770 /* Remap its variables. */
771 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
773 tree new_var;
775 if (can_be_nonlocal (old_var, id))
777 /* We need to add this variable to the local decls as otherwise
778 nothing else will do so. */
779 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
780 add_local_decl (cfun, old_var);
781 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
782 && !DECL_IGNORED_P (old_var)
783 && nonlocalized_list)
784 vec_safe_push (*nonlocalized_list, old_var);
785 continue;
788 /* Remap the variable. */
789 new_var = remap_decl (old_var, id);
791 /* If we didn't remap this variable, we can't mess with its
792 TREE_CHAIN. If we remapped this variable to the return slot, it's
793 already declared somewhere else, so don't declare it here. */
795 if (new_var == id->retvar)
797 else if (!new_var)
799 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
800 && !DECL_IGNORED_P (old_var)
801 && nonlocalized_list)
802 vec_safe_push (*nonlocalized_list, old_var);
804 else
806 gcc_assert (DECL_P (new_var));
807 DECL_CHAIN (new_var) = new_decls;
808 new_decls = new_var;
810 /* Also copy value-expressions. */
811 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
813 tree tem = DECL_VALUE_EXPR (new_var);
814 bool old_regimplify = id->regimplify;
815 id->remapping_type_depth++;
816 walk_tree (&tem, copy_tree_body_r, id, NULL);
817 id->remapping_type_depth--;
818 id->regimplify = old_regimplify;
819 SET_DECL_VALUE_EXPR (new_var, tem);
824 return nreverse (new_decls);
827 /* Copy the BLOCK to contain remapped versions of the variables
828 therein. And hook the new block into the block-tree. */
830 static void
831 remap_block (tree *block, copy_body_data *id)
833 tree old_block;
834 tree new_block;
836 /* Make the new block. */
837 old_block = *block;
838 new_block = make_node (BLOCK);
839 TREE_USED (new_block) = TREE_USED (old_block);
840 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
841 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
842 BLOCK_NONLOCALIZED_VARS (new_block)
843 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
844 *block = new_block;
846 /* Remap its variables. */
847 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
848 &BLOCK_NONLOCALIZED_VARS (new_block),
849 id);
851 if (id->transform_lang_insert_block)
852 id->transform_lang_insert_block (new_block);
854 /* Remember the remapped block. */
855 insert_decl_map (id, old_block, new_block);
858 /* Copy the whole block tree and root it in id->block. */
860 static tree
861 remap_blocks (tree block, copy_body_data *id)
863 tree t;
864 tree new_tree = block;
866 if (!block)
867 return NULL;
869 remap_block (&new_tree, id);
870 gcc_assert (new_tree != block);
871 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
872 prepend_lexical_block (new_tree, remap_blocks (t, id));
873 /* Blocks are in arbitrary order, but to make things slightly prettier,
874 do not swap the order when producing a copy. */
875 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
876 return new_tree;
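/* An illustrative sketch: remap_blocks copies the lexical block tree
   recursively, so an original tree

       OUTER
         INNER1
         INNER2

   yields a fresh OUTER' with subblocks INNER1' and INNER2' in the same
   order; prepend_lexical_block builds the subblock list in reverse and
   the blocks_nreverse call above restores the original order.  */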
879 /* Remap the block tree rooted at BLOCK to nothing. */
881 static void
882 remap_blocks_to_null (tree block, copy_body_data *id)
884 tree t;
885 insert_decl_map (id, block, NULL_TREE);
886 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
887 remap_blocks_to_null (t, id);
890 /* Remap the location info pointed to by LOCUS. */
892 static location_t
893 remap_location (location_t locus, copy_body_data *id)
895 if (LOCATION_BLOCK (locus))
897 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
898 gcc_assert (n);
899 if (*n)
900 return set_block (locus, *n);
903 locus = LOCATION_LOCUS (locus);
905 if (locus != UNKNOWN_LOCATION && id->block)
906 return set_block (locus, id->block);
908 return locus;
911 static void
912 copy_statement_list (tree *tp)
914 tree_stmt_iterator oi, ni;
915 tree new_tree;
917 new_tree = alloc_stmt_list ();
918 ni = tsi_start (new_tree);
919 oi = tsi_start (*tp);
920 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
921 *tp = new_tree;
923 for (; !tsi_end_p (oi); tsi_next (&oi))
925 tree stmt = tsi_stmt (oi);
926 if (TREE_CODE (stmt) == STATEMENT_LIST)
927 /* This copy is not redundant; tsi_link_after will smash this
928 STATEMENT_LIST into the end of the one we're building, and we
929 don't want to do that with the original. */
930 copy_statement_list (&stmt);
931 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
935 static void
936 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
938 tree block = BIND_EXPR_BLOCK (*tp);
939 /* Copy (and replace) the statement. */
940 copy_tree_r (tp, walk_subtrees, NULL);
941 if (block)
943 remap_block (&block, id);
944 BIND_EXPR_BLOCK (*tp) = block;
947 if (BIND_EXPR_VARS (*tp))
948 /* This will remap a lot of the same decls again, but this should be
949 harmless. */
950 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
954 /* Create a new gimple_seq by remapping all the statements in BODY
955 using the inlining information in ID. */
957 static gimple_seq
958 remap_gimple_seq (gimple_seq body, copy_body_data *id)
960 gimple_stmt_iterator si;
961 gimple_seq new_body = NULL;
963 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
965 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
966 gimple_seq_add_seq (&new_body, new_stmts);
969 return new_body;
973 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
974 block using the mapping information in ID. */
976 static gimple *
977 copy_gimple_bind (gbind *stmt, copy_body_data *id)
979 gimple *new_bind;
980 tree new_block, new_vars;
981 gimple_seq body, new_body;
983 /* Copy the statement. Note that we purposely don't use copy_stmt
984 here because we need to remap statements as we copy. */
985 body = gimple_bind_body (stmt);
986 new_body = remap_gimple_seq (body, id);
988 new_block = gimple_bind_block (stmt);
989 if (new_block)
990 remap_block (&new_block, id);
992 /* This will remap a lot of the same decls again, but this should be
993 harmless. */
994 new_vars = gimple_bind_vars (stmt);
995 if (new_vars)
996 new_vars = remap_decls (new_vars, NULL, id);
998 new_bind = gimple_build_bind (new_vars, new_body, new_block);
1000 return new_bind;
1003 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
1005 static bool
1006 is_parm (tree decl)
1008 if (TREE_CODE (decl) == SSA_NAME)
1010 decl = SSA_NAME_VAR (decl);
1011 if (!decl)
1012 return false;
1015 return (TREE_CODE (decl) == PARM_DECL);
1018 /* Remap the dependence CLIQUE from the source to the destination function
1019 as specified in ID. */
1021 static unsigned short
1022 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1024 if (clique == 0 || processing_debug_stmt)
1025 return 0;
1026 if (!id->dependence_map)
1027 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1028 bool existed;
1029 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1030 if (!existed)
1032 /* Clique 1 is reserved for local ones set by PTA. */
1033 if (cfun->last_clique == 0)
1034 cfun->last_clique = 1;
1035 newc = ++cfun->last_clique;
1037 return newc;
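/* An illustrative example with made-up numbers: if the source body used
   clique/base pairs 2/1 and 2/3 on its MEM_REFs and the destination
   function's last_clique is currently 3, both references are rewritten
   to 4/1 and 4/3. Every distinct source clique gets one fresh clique
   in the destination, while the bases are copied unchanged.  */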
1040 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1041 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1042 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
1043 recursing into the children nodes of *TP. */
1045 static tree
1046 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1048 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1049 copy_body_data *id = (copy_body_data *) wi_p->info;
1050 tree fn = id->src_fn;
1052 /* For recursive invocations this is no longer the LHS itself. */
1053 bool is_lhs = wi_p->is_lhs;
1054 wi_p->is_lhs = false;
1056 if (TREE_CODE (*tp) == SSA_NAME)
1058 *tp = remap_ssa_name (*tp, id);
1059 *walk_subtrees = 0;
1060 if (is_lhs)
1061 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1062 return NULL;
1064 else if (auto_var_in_fn_p (*tp, fn))
1066 /* Local variables and labels need to be replaced by equivalent
1067 variables. We don't want to copy static variables; there's
1068 only one of those, no matter how many times we inline the
1069 containing function. Similarly for globals from an outer
1070 function. */
1071 tree new_decl;
1073 /* Remap the declaration. */
1074 new_decl = remap_decl (*tp, id);
1075 gcc_assert (new_decl);
1076 /* Replace this variable with the copy. */
1077 STRIP_TYPE_NOPS (new_decl);
1078 /* ??? The C++ frontend uses void * pointer zero to initialize
1079 any other type. This confuses the middle-end type verification.
1080 As cloned bodies do not go through gimplification again, the fixup
1081 there doesn't trigger. */
1082 if (TREE_CODE (new_decl) == INTEGER_CST
1083 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1084 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1085 *tp = new_decl;
1086 *walk_subtrees = 0;
1088 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1089 gcc_unreachable ();
1090 else if (TREE_CODE (*tp) == SAVE_EXPR)
1091 gcc_unreachable ();
1092 else if (TREE_CODE (*tp) == LABEL_DECL
1093 && (!DECL_CONTEXT (*tp)
1094 || decl_function_context (*tp) == id->src_fn))
1095 /* These may need to be remapped for EH handling. */
1096 *tp = remap_decl (*tp, id);
1097 else if (TREE_CODE (*tp) == FIELD_DECL)
1099 /* If the enclosing record type is variably_modified_type_p, the field
1100 has already been remapped. Otherwise, it need not be. */
1101 tree *n = id->decl_map->get (*tp);
1102 if (n)
1103 *tp = *n;
1104 *walk_subtrees = 0;
1106 else if (TYPE_P (*tp))
1107 /* Types may need remapping as well. */
1108 *tp = remap_type (*tp, id);
1109 else if (CONSTANT_CLASS_P (*tp))
1111 /* If this is a constant, we have to copy the node iff the type
1112 will be remapped. copy_tree_r will not copy a constant. */
1113 tree new_type = remap_type (TREE_TYPE (*tp), id);
1115 if (new_type == TREE_TYPE (*tp))
1116 *walk_subtrees = 0;
1118 else if (TREE_CODE (*tp) == INTEGER_CST)
1119 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1120 else
1122 *tp = copy_node (*tp);
1123 TREE_TYPE (*tp) = new_type;
1126 else
1128 /* Otherwise, just copy the node. Note that copy_tree_r already
1129 knows not to copy VAR_DECLs, etc., so this is safe. */
1131 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1133 /* We need to re-canonicalize MEM_REFs from inline substitutions
1134 that can happen when a pointer argument is an ADDR_EXPR.
1135 Recurse here manually to allow that. */
1136 tree ptr = TREE_OPERAND (*tp, 0);
1137 tree type = remap_type (TREE_TYPE (*tp), id);
1138 tree old = *tp;
1139 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1140 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1141 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1142 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1143 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1144 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1146 MR_DEPENDENCE_CLIQUE (*tp)
1147 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1148 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1150 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1151 remapped a parameter as the property might be valid only
1152 for the parameter itself. */
1153 if (TREE_THIS_NOTRAP (old)
1154 && (!is_parm (TREE_OPERAND (old, 0))
1155 || (!id->transform_parameter && is_parm (ptr))))
1156 TREE_THIS_NOTRAP (*tp) = 1;
1157 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1158 *walk_subtrees = 0;
1159 return NULL;
1162 /* Here is the "usual case". Copy this tree node, and then
1163 tweak some special cases. */
1164 copy_tree_r (tp, walk_subtrees, NULL);
1166 if (TREE_CODE (*tp) != OMP_CLAUSE)
1167 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1169 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1171 /* The copied TARGET_EXPR has never been expanded, even if the
1172 original node was expanded already. */
1173 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1174 TREE_OPERAND (*tp, 3) = NULL_TREE;
1176 else if (TREE_CODE (*tp) == ADDR_EXPR)
1178 /* Variable substitution need not be simple. In particular, consider
1179 the MEM_REF substitution above. Make sure that
1180 TREE_CONSTANT and friends are up-to-date. */
1181 int invariant = is_gimple_min_invariant (*tp);
1182 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1183 recompute_tree_invariant_for_addr_expr (*tp);
1185 /* If this used to be invariant, but is not any longer,
1186 then regimplification is probably needed. */
1187 if (invariant && !is_gimple_min_invariant (*tp))
1188 id->regimplify = true;
1190 *walk_subtrees = 0;
1194 /* Update the TREE_BLOCK for the cloned expr. */
1195 if (EXPR_P (*tp))
1197 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1198 tree old_block = TREE_BLOCK (*tp);
1199 if (old_block)
1201 tree *n;
1202 n = id->decl_map->get (TREE_BLOCK (*tp));
1203 if (n)
1204 new_block = *n;
1206 TREE_SET_BLOCK (*tp, new_block);
1209 /* Keep iterating. */
1210 return NULL_TREE;
1214 /* Called from copy_body_id via walk_tree. DATA is really a
1215 `copy_body_data *'. */
1217 tree
1218 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1220 copy_body_data *id = (copy_body_data *) data;
1221 tree fn = id->src_fn;
1222 tree new_block;
1224 /* Begin by recognizing trees that we'll completely rewrite for the
1225 inlining context. Our output for these trees is completely
1226 different from our input (e.g. RETURN_EXPR is deleted and morphs
1227 into an edge). Further down, we'll handle trees that get
1228 duplicated and/or tweaked. */
1230 /* When requested, RETURN_EXPRs should be transformed to just the
1231 contained MODIFY_EXPR. The branch semantics of the return will
1232 be handled elsewhere by manipulating the CFG rather than a statement. */
1233 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1235 tree assignment = TREE_OPERAND (*tp, 0);
1237 /* If we're returning something, just turn that into an
1238 assignment into the equivalent of the original RESULT_DECL.
1239 If the "assignment" is just the result decl, the result
1240 decl has already been set (e.g. a recent "foo (&result_decl,
1241 ...)"); just toss the entire RETURN_EXPR. */
1242 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1244 /* Replace the RETURN_EXPR with (a copy of) the
1245 MODIFY_EXPR hanging underneath. */
1246 *tp = copy_node (assignment);
1248 else /* Else the RETURN_EXPR returns no value. */
1250 *tp = NULL;
1251 return (tree) (void *)1;
1254 else if (TREE_CODE (*tp) == SSA_NAME)
1256 *tp = remap_ssa_name (*tp, id);
1257 *walk_subtrees = 0;
1258 return NULL;
1261 /* Local variables and labels need to be replaced by equivalent
1262 variables. We don't want to copy static variables; there's only
1263 one of those, no matter how many times we inline the containing
1264 function. Similarly for globals from an outer function. */
1265 else if (auto_var_in_fn_p (*tp, fn))
1267 tree new_decl;
1269 /* Remap the declaration. */
1270 new_decl = remap_decl (*tp, id);
1271 gcc_assert (new_decl);
1272 /* Replace this variable with the copy. */
1273 STRIP_TYPE_NOPS (new_decl);
1274 *tp = new_decl;
1275 *walk_subtrees = 0;
1277 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1278 copy_statement_list (tp);
1279 else if (TREE_CODE (*tp) == SAVE_EXPR
1280 || TREE_CODE (*tp) == TARGET_EXPR)
1281 remap_save_expr (tp, id->decl_map, walk_subtrees);
1282 else if (TREE_CODE (*tp) == LABEL_DECL
1283 && (! DECL_CONTEXT (*tp)
1284 || decl_function_context (*tp) == id->src_fn))
1285 /* These may need to be remapped for EH handling. */
1286 *tp = remap_decl (*tp, id);
1287 else if (TREE_CODE (*tp) == BIND_EXPR)
1288 copy_bind_expr (tp, walk_subtrees, id);
1289 /* Types may need remapping as well. */
1290 else if (TYPE_P (*tp))
1291 *tp = remap_type (*tp, id);
1293 /* If this is a constant, we have to copy the node iff the type will be
1294 remapped. copy_tree_r will not copy a constant. */
1295 else if (CONSTANT_CLASS_P (*tp))
1297 tree new_type = remap_type (TREE_TYPE (*tp), id);
1299 if (new_type == TREE_TYPE (*tp))
1300 *walk_subtrees = 0;
1302 else if (TREE_CODE (*tp) == INTEGER_CST)
1303 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1304 else
1306 *tp = copy_node (*tp);
1307 TREE_TYPE (*tp) = new_type;
1311 /* Otherwise, just copy the node. Note that copy_tree_r already
1312 knows not to copy VAR_DECLs, etc., so this is safe. */
1313 else
1315 /* Here we handle trees that are not completely rewritten.
1316 First we detect some inlining-induced bogosities for
1317 discarding. */
1318 if (TREE_CODE (*tp) == MODIFY_EXPR
1319 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1320 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1322 /* Some assignments VAR = VAR; don't generate any rtl code
1323 and thus don't count as variable modification. Avoid
1324 keeping bogosities like 0 = 0. */
1325 tree decl = TREE_OPERAND (*tp, 0), value;
1326 tree *n;
1328 n = id->decl_map->get (decl);
1329 if (n)
1331 value = *n;
1332 STRIP_TYPE_NOPS (value);
1333 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1335 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1336 return copy_tree_body_r (tp, walk_subtrees, data);
1340 else if (TREE_CODE (*tp) == INDIRECT_REF)
1342 /* Get rid of *& from inline substitutions that can happen when a
1343 pointer argument is an ADDR_EXPR. */
1344 tree decl = TREE_OPERAND (*tp, 0);
1345 tree *n = id->decl_map->get (decl);
1346 if (n)
1348 /* If we happen to get an ADDR_EXPR in n->value, strip
1349 it manually here as we'll eventually get ADDR_EXPRs
1350 which lie about their types pointed to. In this case
1351 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1352 but we absolutely rely on that. As fold_indirect_ref
1353 does other useful transformations, try that first, though. */
1354 tree type = TREE_TYPE (*tp);
1355 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1356 tree old = *tp;
1357 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1358 if (! *tp)
1360 type = remap_type (type, id);
1361 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1363 *tp
1364 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1365 /* ??? We should either assert here or build
1366 a VIEW_CONVERT_EXPR instead of blindly leaking
1367 incompatible types to our IL. */
1368 if (! *tp)
1369 *tp = TREE_OPERAND (ptr, 0);
1371 else
1373 *tp = build1 (INDIRECT_REF, type, ptr);
1374 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1375 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1376 TREE_READONLY (*tp) = TREE_READONLY (old);
1377 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1378 have remapped a parameter as the property might be
1379 valid only for the parameter itself. */
1380 if (TREE_THIS_NOTRAP (old)
1381 && (!is_parm (TREE_OPERAND (old, 0))
1382 || (!id->transform_parameter && is_parm (ptr))))
1383 TREE_THIS_NOTRAP (*tp) = 1;
1386 *walk_subtrees = 0;
1387 return NULL;
1390 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1392 /* We need to re-canonicalize MEM_REFs from inline substitutions
1393 that can happen when a pointer argument is an ADDR_EXPR.
1394 Recurse here manually to allow that. */
1395 tree ptr = TREE_OPERAND (*tp, 0);
1396 tree type = remap_type (TREE_TYPE (*tp), id);
1397 tree old = *tp;
1398 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1399 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1400 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1401 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1402 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1403 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1405 MR_DEPENDENCE_CLIQUE (*tp)
1406 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1407 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1409 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1410 remapped a parameter as the property might be valid only
1411 for the parameter itself. */
1412 if (TREE_THIS_NOTRAP (old)
1413 && (!is_parm (TREE_OPERAND (old, 0))
1414 || (!id->transform_parameter && is_parm (ptr))))
1415 TREE_THIS_NOTRAP (*tp) = 1;
1416 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1417 *walk_subtrees = 0;
1418 return NULL;
1421 /* Here is the "usual case". Copy this tree node, and then
1422 tweak some special cases. */
1423 copy_tree_r (tp, walk_subtrees, NULL);
1425 /* If EXPR has a block defined, map it to the newly constructed block.
1426 When inlining we want EXPRs without a block to appear in the block
1427 of the function call if we are not remapping a type. */
1428 if (EXPR_P (*tp))
1430 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1431 if (TREE_BLOCK (*tp))
1433 tree *n;
1434 n = id->decl_map->get (TREE_BLOCK (*tp));
1435 if (n)
1436 new_block = *n;
1438 TREE_SET_BLOCK (*tp, new_block);
1441 if (TREE_CODE (*tp) != OMP_CLAUSE)
1442 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1444 /* The copied TARGET_EXPR has never been expanded, even if the
1445 original node was expanded already. */
1446 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1448 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1449 TREE_OPERAND (*tp, 3) = NULL_TREE;
1452 /* Variable substitution need not be simple. In particular, consider the
1453 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1454 and friends are up-to-date. */
1455 else if (TREE_CODE (*tp) == ADDR_EXPR)
1457 int invariant = is_gimple_min_invariant (*tp);
1458 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1460 /* Handle the case where we substituted an INDIRECT_REF
1461 into the operand of the ADDR_EXPR. */
1462 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1463 && !id->do_not_fold)
1465 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1466 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1467 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1468 *tp = t;
1470 else
1471 recompute_tree_invariant_for_addr_expr (*tp);
1473 /* If this used to be invariant, but is not any longer,
1474 then regimplification is probably needed. */
1475 if (invariant && !is_gimple_min_invariant (*tp))
1476 id->regimplify = true;
1478 *walk_subtrees = 0;
1482 /* Keep iterating. */
1483 return NULL_TREE;
1486 /* Helper for remap_gimple_stmt. Given an EH region number for the
1487 source function, map that to the duplicate EH region number in
1488 the destination function. */
1490 static int
1491 remap_eh_region_nr (int old_nr, copy_body_data *id)
1493 eh_region old_r, new_r;
1495 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1496 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1498 return new_r->index;
1501 /* Similar, but operate on INTEGER_CSTs. */
1503 static tree
1504 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1506 int old_nr, new_nr;
1508 old_nr = tree_to_shwi (old_t_nr);
1509 new_nr = remap_eh_region_nr (old_nr, id);
1511 return build_int_cst (integer_type_node, new_nr);
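/* An illustrative example with made-up region numbers: if EH region 2
   of the source function was duplicated as region 5 in the destination,
   a __builtin_eh_pointer (2) call in the copied body is rewritten
   further below to __builtin_eh_pointer (5), and the region operands of
   GIMPLE_RESX and GIMPLE_EH_DISPATCH statements are renumbered the same
   way.  */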
1514 /* Helper for copy_bb. Remap statement STMT using the inlining
1515 information in ID. Return the new statement copy. */
1517 static gimple_seq
1518 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1520 gimple *copy = NULL;
1521 struct walk_stmt_info wi;
1522 bool skip_first = false;
1523 gimple_seq stmts = NULL;
1525 if (is_gimple_debug (stmt)
1526 && (gimple_debug_nonbind_marker_p (stmt)
1527 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1528 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1529 return NULL;
1531 /* Begin by recognizing trees that we'll completely rewrite for the
1532 inlining context. Our output for these trees is completely
1533 different from our input (e.g. RETURN_EXPR is deleted and morphs
1534 into an edge). Further down, we'll handle trees that get
1535 duplicated and/or tweaked. */
1537 /* When requested, GIMPLE_RETURN should be transformed to just the
1538 contained GIMPLE_ASSIGN. The branch semantics of the return will
1539 be handled elsewhere by manipulating the CFG rather than the
1540 statement. */
1541 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1543 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1545 /* If we're returning something, just turn that into an
1546 assignment to the equivalent of the original RESULT_DECL.
1547 If RETVAL is just the result decl, the result decl has
1548 already been set (e.g. a recent "foo (&result_decl, ...)");
1549 just toss the entire GIMPLE_RETURN. Likewise for when the
1550 call doesn't want the return value. */
1551 if (retval
1552 && (TREE_CODE (retval) != RESULT_DECL
1553 && (!id->call_stmt
1554 || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1555 && (TREE_CODE (retval) != SSA_NAME
1556 || ! SSA_NAME_VAR (retval)
1557 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1559 copy = gimple_build_assign (id->do_not_unshare
1560 ? id->retvar : unshare_expr (id->retvar),
1561 retval);
1562 /* id->retvar is already substituted. Skip it on later remapping. */
1563 skip_first = true;
1565 else
1566 return NULL;
1568 else if (gimple_has_substatements (stmt))
1570 gimple_seq s1, s2;
1572 /* When cloning bodies from the C++ front end, we will be handed bodies
1573 in High GIMPLE form. Handle here all the High GIMPLE statements that
1574 have embedded statements. */
1575 switch (gimple_code (stmt))
1577 case GIMPLE_BIND:
1578 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1579 break;
1581 case GIMPLE_CATCH:
1583 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1584 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1585 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1587 break;
1589 case GIMPLE_EH_FILTER:
1590 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1591 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1592 break;
1594 case GIMPLE_TRY:
1595 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1596 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1597 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1598 break;
1600 case GIMPLE_WITH_CLEANUP_EXPR:
1601 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1602 copy = gimple_build_wce (s1);
1603 break;
1605 case GIMPLE_OMP_PARALLEL:
1607 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1608 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1609 copy = gimple_build_omp_parallel
1610 (s1,
1611 gimple_omp_parallel_clauses (omp_par_stmt),
1612 gimple_omp_parallel_child_fn (omp_par_stmt),
1613 gimple_omp_parallel_data_arg (omp_par_stmt));
1615 break;
1617 case GIMPLE_OMP_TASK:
1618 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1619 copy = gimple_build_omp_task
1620 (s1,
1621 gimple_omp_task_clauses (stmt),
1622 gimple_omp_task_child_fn (stmt),
1623 gimple_omp_task_data_arg (stmt),
1624 gimple_omp_task_copy_fn (stmt),
1625 gimple_omp_task_arg_size (stmt),
1626 gimple_omp_task_arg_align (stmt));
1627 break;
1629 case GIMPLE_OMP_FOR:
1630 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1631 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1632 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1633 gimple_omp_for_clauses (stmt),
1634 gimple_omp_for_collapse (stmt), s2);
1636 size_t i;
1637 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1639 gimple_omp_for_set_index (copy, i,
1640 gimple_omp_for_index (stmt, i));
1641 gimple_omp_for_set_initial (copy, i,
1642 gimple_omp_for_initial (stmt, i));
1643 gimple_omp_for_set_final (copy, i,
1644 gimple_omp_for_final (stmt, i));
1645 gimple_omp_for_set_incr (copy, i,
1646 gimple_omp_for_incr (stmt, i));
1647 gimple_omp_for_set_cond (copy, i,
1648 gimple_omp_for_cond (stmt, i));
1651 break;
1653 case GIMPLE_OMP_MASTER:
1654 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1655 copy = gimple_build_omp_master (s1);
1656 break;
1658 case GIMPLE_OMP_TASKGROUP:
1659 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1660 copy = gimple_build_omp_taskgroup
1661 (s1, gimple_omp_taskgroup_clauses (stmt));
1662 break;
1664 case GIMPLE_OMP_ORDERED:
1665 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1666 copy = gimple_build_omp_ordered
1667 (s1,
1668 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1669 break;
1671 case GIMPLE_OMP_SCAN:
1672 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1673 copy = gimple_build_omp_scan
1674 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1675 break;
1677 case GIMPLE_OMP_SECTION:
1678 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1679 copy = gimple_build_omp_section (s1);
1680 break;
1682 case GIMPLE_OMP_SECTIONS:
1683 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1684 copy = gimple_build_omp_sections
1685 (s1, gimple_omp_sections_clauses (stmt));
1686 break;
1688 case GIMPLE_OMP_SINGLE:
1689 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1690 copy = gimple_build_omp_single
1691 (s1, gimple_omp_single_clauses (stmt));
1692 break;
1694 case GIMPLE_OMP_TARGET:
1695 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1696 copy = gimple_build_omp_target
1697 (s1, gimple_omp_target_kind (stmt),
1698 gimple_omp_target_clauses (stmt));
1699 break;
1701 case GIMPLE_OMP_TEAMS:
1702 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1703 copy = gimple_build_omp_teams
1704 (s1, gimple_omp_teams_clauses (stmt));
1705 break;
1707 case GIMPLE_OMP_CRITICAL:
1708 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1709 copy = gimple_build_omp_critical (s1,
1710 gimple_omp_critical_name
1711 (as_a <gomp_critical *> (stmt)),
1712 gimple_omp_critical_clauses
1713 (as_a <gomp_critical *> (stmt)));
1714 break;
1716 case GIMPLE_TRANSACTION:
1718 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1719 gtransaction *new_trans_stmt;
1720 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1721 id);
1722 copy = new_trans_stmt = gimple_build_transaction (s1);
1723 gimple_transaction_set_subcode (new_trans_stmt,
1724 gimple_transaction_subcode (old_trans_stmt));
1725 gimple_transaction_set_label_norm (new_trans_stmt,
1726 gimple_transaction_label_norm (old_trans_stmt));
1727 gimple_transaction_set_label_uninst (new_trans_stmt,
1728 gimple_transaction_label_uninst (old_trans_stmt));
1729 gimple_transaction_set_label_over (new_trans_stmt,
1730 gimple_transaction_label_over (old_trans_stmt));
1732 break;
1734 default:
1735 gcc_unreachable ();
1738 else
1740 if (gimple_assign_copy_p (stmt)
1741 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1742 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1744 /* Here we handle statements that are not completely rewritten.
1745 First we detect some inlining-induced bogosities for
1746 discarding. */
1748 /* Some assignments VAR = VAR; don't generate any rtl code
1749 and thus don't count as variable modification. Avoid
1750 keeping bogosities like 0 = 0. */
1751 tree decl = gimple_assign_lhs (stmt), value;
1752 tree *n;
1754 n = id->decl_map->get (decl);
1755 if (n)
1757 value = *n;
1758 STRIP_TYPE_NOPS (value);
1759 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1760 return NULL;
1764 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1765 in a block that we aren't copying during tree_function_versioning,
1766 just drop the clobber stmt. */
1767 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1769 tree lhs = gimple_assign_lhs (stmt);
1770 if (TREE_CODE (lhs) == MEM_REF
1771 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1773 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1774 if (gimple_bb (def_stmt)
1775 && !bitmap_bit_p (id->blocks_to_copy,
1776 gimple_bb (def_stmt)->index))
1777 return NULL;
1781 /* We do not allow CLOBBERs of handled components. In case the
1782 returned value is stored via such a handled component, remove
1783 the clobber so the stmt verifier is happy. */
1784 if (gimple_clobber_p (stmt)
1785 && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1787 tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1788 if (!DECL_P (remapped)
1789 && TREE_CODE (remapped) != MEM_REF)
1790 return NULL;
1793 if (gimple_debug_bind_p (stmt))
1795 gdebug *copy
1796 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1797 gimple_debug_bind_get_value (stmt),
1798 stmt);
1799 if (id->reset_location)
1800 gimple_set_location (copy, input_location);
1801 id->debug_stmts.safe_push (copy);
1802 gimple_seq_add_stmt (&stmts, copy);
1803 return stmts;
1805 if (gimple_debug_source_bind_p (stmt))
1807 gdebug *copy = gimple_build_debug_source_bind
1808 (gimple_debug_source_bind_get_var (stmt),
1809 gimple_debug_source_bind_get_value (stmt),
1810 stmt);
1811 if (id->reset_location)
1812 gimple_set_location (copy, input_location);
1813 id->debug_stmts.safe_push (copy);
1814 gimple_seq_add_stmt (&stmts, copy);
1815 return stmts;
1817 if (gimple_debug_nonbind_marker_p (stmt))
1819 /* If the inlined function has too many debug markers,
1820 don't copy them. */
1821 if (id->src_cfun->debug_marker_count
1822 > param_max_debug_marker_count
1823 || id->reset_location)
1824 return stmts;
1826 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1827 id->debug_stmts.safe_push (copy);
1828 gimple_seq_add_stmt (&stmts, copy);
1829 return stmts;
1832 /* Create a new deep copy of the statement. */
1833 copy = gimple_copy (stmt);
1835 /* Clear flags that need revisiting. */
1836 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1838 if (gimple_call_tail_p (call_stmt))
1839 gimple_call_set_tail (call_stmt, false);
1840 if (gimple_call_from_thunk_p (call_stmt))
1841 gimple_call_set_from_thunk (call_stmt, false);
1842 if (gimple_call_internal_p (call_stmt))
1843 switch (gimple_call_internal_fn (call_stmt))
1845 case IFN_GOMP_SIMD_LANE:
1846 case IFN_GOMP_SIMD_VF:
1847 case IFN_GOMP_SIMD_LAST_LANE:
1848 case IFN_GOMP_SIMD_ORDERED_START:
1849 case IFN_GOMP_SIMD_ORDERED_END:
1850 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1851 break;
1852 default:
1853 break;
1857 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1858 RESX and EH_DISPATCH. */
1859 if (id->eh_map)
1860 switch (gimple_code (copy))
1862 case GIMPLE_CALL:
1864 tree r, fndecl = gimple_call_fndecl (copy);
1865 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1866 switch (DECL_FUNCTION_CODE (fndecl))
1868 case BUILT_IN_EH_COPY_VALUES:
1869 r = gimple_call_arg (copy, 1);
1870 r = remap_eh_region_tree_nr (r, id);
1871 gimple_call_set_arg (copy, 1, r);
1872 /* FALLTHRU */
1874 case BUILT_IN_EH_POINTER:
1875 case BUILT_IN_EH_FILTER:
1876 r = gimple_call_arg (copy, 0);
1877 r = remap_eh_region_tree_nr (r, id);
1878 gimple_call_set_arg (copy, 0, r);
1879 break;
1881 default:
1882 break;
1885 /* Reset alias info if we didn't apply measures to
1886 keep it valid over inlining by setting DECL_PT_UID. */
1887 if (!id->src_cfun->gimple_df
1888 || !id->src_cfun->gimple_df->ipa_pta)
1889 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1891 break;
1893 case GIMPLE_RESX:
1895 gresx *resx_stmt = as_a <gresx *> (copy);
1896 int r = gimple_resx_region (resx_stmt);
1897 r = remap_eh_region_nr (r, id);
1898 gimple_resx_set_region (resx_stmt, r);
1900 break;
1902 case GIMPLE_EH_DISPATCH:
1904 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1905 int r = gimple_eh_dispatch_region (eh_dispatch);
1906 r = remap_eh_region_nr (r, id);
1907 gimple_eh_dispatch_set_region (eh_dispatch, r);
1909 break;
1911 default:
1912 break;
1916 /* If STMT has a block defined, map it to the newly constructed block. */
1917 if (tree block = gimple_block (copy))
1919 tree *n;
1920 n = id->decl_map->get (block);
1921 gcc_assert (n);
1922 gimple_set_block (copy, *n);
1924 if (id->param_body_adjs)
1926 gimple_seq extra_stmts = NULL;
1927 id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts);
1928 if (!gimple_seq_empty_p (extra_stmts))
1930 memset (&wi, 0, sizeof (wi));
1931 wi.info = id;
1932 for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1933 !gsi_end_p (egsi);
1934 gsi_next (&egsi))
1935 walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1936 gimple_seq_add_seq (&stmts, extra_stmts);
1940 if (id->reset_location)
1941 gimple_set_location (copy, input_location);
1943 /* Debug statements ought to be rebuilt and not copied. */
1944 gcc_checking_assert (!is_gimple_debug (copy));
1946 /* Remap all the operands in COPY. */
1947 memset (&wi, 0, sizeof (wi));
1948 wi.info = id;
1949 if (skip_first)
1950 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1951 else
1952 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1954 /* Clear the copied virtual operands. We are not remapping them here
1955 but are going to recreate them from scratch. */
1956 if (gimple_has_mem_ops (copy))
1958 gimple_set_vdef (copy, NULL_TREE);
1959 gimple_set_vuse (copy, NULL_TREE);
1962 if (cfun->can_throw_non_call_exceptions)
1964 /* When inlining a function which does not have non-call exceptions
1965 enabled into a function that has (which only happens with
1966 always-inline) we have to fix up stmts that cannot throw. */
1967 if (gcond *cond = dyn_cast <gcond *> (copy))
1968 if (gimple_could_trap_p (cond))
1970 gassign *cmp
1971 = gimple_build_assign (make_ssa_name (boolean_type_node),
1972 gimple_cond_code (cond),
1973 gimple_cond_lhs (cond),
1974 gimple_cond_rhs (cond));
1975 gimple_seq_add_stmt (&stmts, cmp);
1976 gimple_cond_set_code (cond, NE_EXPR);
1977 gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
1978 gimple_cond_set_rhs (cond, boolean_false_node);
1982 gimple_seq_add_stmt (&stmts, copy);
1983 return stmts;
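/* A rough illustration of the can_throw_non_call_exceptions fixup
   above (SSA names below are purely illustrative): a potentially
   trapping condition, e.g. a floating-point compare

     if (x_1 < y_2) goto <L1>; else goto <L2>;

   cannot stay in a GIMPLE_COND once it may throw, so it is split into
   an assignment followed by a test that cannot trap:

     _3 = x_1 < y_2;
     if (_3 != 0) goto <L1>; else goto <L2>;  */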
1987 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1988 later. */
1990 static basic_block
1991 copy_bb (copy_body_data *id, basic_block bb,
1992 profile_count num, profile_count den)
1994 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1995 basic_block copy_basic_block;
1996 tree decl;
1997 basic_block prev;
1999 profile_count::adjust_for_ipa_scaling (&num, &den);
2001 /* Search for previous copied basic block. */
2002 prev = bb->prev_bb;
2003 while (!prev->aux)
2004 prev = prev->prev_bb;
2006 /* create_basic_block() will append every new block to
2007 basic_block_info automatically. */
2008 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
2009 copy_basic_block->count = bb->count.apply_scale (num, den);
2011 copy_gsi = gsi_start_bb (copy_basic_block);
2013 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2015 gimple_seq stmts;
2016 gimple *stmt = gsi_stmt (gsi);
2017 gimple *orig_stmt = stmt;
2018 gimple_stmt_iterator stmts_gsi;
2019 bool stmt_added = false;
2021 id->regimplify = false;
2022 stmts = remap_gimple_stmt (stmt, id);
2024 if (gimple_seq_empty_p (stmts))
2025 continue;
2027 seq_gsi = copy_gsi;
2029 for (stmts_gsi = gsi_start (stmts);
2030 !gsi_end_p (stmts_gsi); )
2032 stmt = gsi_stmt (stmts_gsi);
2034 /* Advance iterator now before stmt is moved to seq_gsi. */
2035 gsi_next (&stmts_gsi);
2037 if (gimple_nop_p (stmt))
2038 continue;
2040 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2041 orig_stmt);
2043 /* With return slot optimization we can end up with
2044 non-gimple (foo *)&this->m, fix that here. */
2045 if (is_gimple_assign (stmt)
2046 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
2047 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2049 tree new_rhs;
2050 new_rhs = force_gimple_operand_gsi (&seq_gsi,
2051 gimple_assign_rhs1 (stmt),
2052 true, NULL, false,
2053 GSI_CONTINUE_LINKING);
2054 gimple_assign_set_rhs1 (stmt, new_rhs);
2055 id->regimplify = false;
2058 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2060 if (id->regimplify)
2061 gimple_regimplify_operands (stmt, &seq_gsi);
2063 stmt_added = true;
2066 if (!stmt_added)
2067 continue;
2069 /* If copy_basic_block has been empty at the start of this iteration,
2070 call gsi_start_bb again to get at the newly added statements. */
2071 if (gsi_end_p (copy_gsi))
2072 copy_gsi = gsi_start_bb (copy_basic_block);
2073 else
2074 gsi_next (&copy_gsi);
2076 /* Process the new statement. The call to gimple_regimplify_operands
2077 possibly turned the statement into multiple statements; we
2078 need to process all of them. */
2081 tree fn;
2082 gcall *call_stmt;
2084 stmt = gsi_stmt (copy_gsi);
2085 call_stmt = dyn_cast <gcall *> (stmt);
2086 if (call_stmt
2087 && gimple_call_va_arg_pack_p (call_stmt)
2088 && id->call_stmt
2089 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2091 /* __builtin_va_arg_pack () should be replaced by
2092 all arguments corresponding to ... in the caller. */
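/* Purely illustrative example (assuming the wrapper is always-inline
   as __builtin_va_arg_pack requires): for

     static inline int logit (int level, ...)
     { return printf (__builtin_va_arg_pack ()); }

   inlined at the call logit (1, "%s", msg_4), the copied printf call
   has __builtin_va_arg_pack () replaced by the caller's anonymous
   arguments, yielding printf ("%s", msg_4).  */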
2093 tree p;
2094 gcall *new_call;
2095 vec<tree> argarray;
2096 size_t nargs = gimple_call_num_args (id->call_stmt);
2097 size_t n;
2099 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2100 nargs--;
2102 /* Create the new array of arguments. */
2103 n = nargs + gimple_call_num_args (call_stmt);
2104 argarray.create (n);
2105 argarray.safe_grow_cleared (n, true);
2107 /* Copy all the arguments before '...' */
2108 memcpy (argarray.address (),
2109 gimple_call_arg_ptr (call_stmt, 0),
2110 gimple_call_num_args (call_stmt) * sizeof (tree));
2112 /* Append the arguments passed in '...' */
2113 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2114 gimple_call_arg_ptr (id->call_stmt, 0)
2115 + (gimple_call_num_args (id->call_stmt) - nargs),
2116 nargs * sizeof (tree));
2118 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2119 argarray);
2121 argarray.release ();
2123 /* Copy all GIMPLE_CALL flags, location and block, except
2124 GF_CALL_VA_ARG_PACK. */
2125 gimple_call_copy_flags (new_call, call_stmt);
2126 gimple_call_set_va_arg_pack (new_call, false);
2127 /* location includes block. */
2128 gimple_set_location (new_call, gimple_location (stmt));
2129 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2131 gsi_replace (&copy_gsi, new_call, false);
2132 stmt = new_call;
2134 else if (call_stmt
2135 && id->call_stmt
2136 && (decl = gimple_call_fndecl (stmt))
2137 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2139 /* __builtin_va_arg_pack_len () should be replaced by
2140 the number of anonymous arguments. */
2141 size_t nargs = gimple_call_num_args (id->call_stmt);
2142 tree count, p;
2143 gimple *new_stmt;
2145 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2146 nargs--;
2148 if (!gimple_call_lhs (stmt))
2150 /* Drop unused calls. */
2151 gsi_remove (&copy_gsi, false);
2152 continue;
2154 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2156 count = build_int_cst (integer_type_node, nargs);
2157 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2158 gsi_replace (&copy_gsi, new_stmt, false);
2159 stmt = new_stmt;
2161 else if (nargs != 0)
2163 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2164 count = build_int_cst (integer_type_node, nargs);
2165 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2166 PLUS_EXPR, newlhs, count);
2167 gimple_call_set_lhs (stmt, newlhs);
2168 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2171 else if (call_stmt
2172 && id->call_stmt
2173 && gimple_call_internal_p (stmt)
2174 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2176 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2177 gsi_remove (&copy_gsi, false);
2178 continue;
2181 /* Statements produced by inlining can be unfolded, especially
2182 when we constant propagated some operands. We can't fold
2183 them right now for two reasons:
2184 1) folding requires SSA_NAME_DEF_STMTs to be correct
2185 2) we can't change function calls to builtins.
2186 So we just mark the statement for later folding. We mark
2187 all new statements, instead of just the statements that have
2188 changed by some nontrivial substitution, so even statements made
2189 foldable indirectly are updated. If this turns out to be
2190 expensive, copy_body can be told to watch for nontrivial
2191 changes. */
2192 if (id->statements_to_fold)
2193 id->statements_to_fold->add (stmt);
2195 /* We're duplicating a CALL_EXPR. Find any corresponding
2196 callgraph edges and update or duplicate them. */
2197 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2199 struct cgraph_edge *edge;
2201 switch (id->transform_call_graph_edges)
2203 case CB_CGE_DUPLICATE:
2204 edge = id->src_node->get_edge (orig_stmt);
2205 if (edge)
2207 struct cgraph_edge *old_edge = edge;
2209 /* A speculative call consists of multiple
2210 edges - an indirect edge and one or more direct edges.
2211 Duplicate the whole thing and distribute frequencies
2212 accordingly. */
2213 if (edge->speculative)
2215 int n = 0;
2216 profile_count direct_cnt
2217 = profile_count::zero ();
2219 /* First figure out the distribution of counts
2220 so we can re-scale BB profile accordingly. */
2221 for (cgraph_edge *e = old_edge; e;
2222 e = e->next_speculative_call_target ())
2223 direct_cnt = direct_cnt + e->count;
2225 cgraph_edge *indirect
2226 = old_edge->speculative_call_indirect_edge ();
2227 profile_count indir_cnt = indirect->count;
2229 /* Next iterate over all direct edges, clone each one and its
2230 corresponding reference, and update the profile. */
2231 for (cgraph_edge *e = old_edge;
2233 e = e->next_speculative_call_target ())
2235 profile_count cnt = e->count;
2237 id->dst_node->clone_reference
2238 (e->speculative_call_target_ref (), stmt);
2239 edge = e->clone (id->dst_node, call_stmt,
2240 gimple_uid (stmt), num, den,
2241 true);
2242 profile_probability prob
2243 = cnt.probability_in (direct_cnt
2244 + indir_cnt);
2245 edge->count
2246 = copy_basic_block->count.apply_probability
2247 (prob);
2248 n++;
2250 gcc_checking_assert
2251 (indirect->num_speculative_call_targets_p ()
2252 == n);
2254 /* Duplicate the indirect edge after all direct edges
2255 cloned. */
2256 indirect = indirect->clone (id->dst_node, call_stmt,
2257 gimple_uid (stmt),
2258 num, den,
2259 true);
2261 profile_probability prob
2262 = indir_cnt.probability_in (direct_cnt
2263 + indir_cnt);
2264 indirect->count
2265 = copy_basic_block->count.apply_probability (prob);
2267 else
2269 edge = edge->clone (id->dst_node, call_stmt,
2270 gimple_uid (stmt),
2271 num, den,
2272 true);
2273 edge->count = copy_basic_block->count;
2276 break;
2278 case CB_CGE_MOVE_CLONES:
2279 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2280 call_stmt);
2281 edge = id->dst_node->get_edge (stmt);
2282 break;
2284 case CB_CGE_MOVE:
2285 edge = id->dst_node->get_edge (orig_stmt);
2286 if (edge)
2287 edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2288 break;
2290 default:
2291 gcc_unreachable ();
2294 /* Constant propagation on arguments done during inlining
2295 may create a new direct call. Produce an edge for it. */
2296 if ((!edge
2297 || (edge->indirect_inlining_edge
2298 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2299 && id->dst_node->definition
2300 && (fn = gimple_call_fndecl (stmt)) != NULL)
2302 struct cgraph_node *dest = cgraph_node::get_create (fn);
2304 /* We have a missing edge in the callgraph. This can happen
2305 when previous inlining turned an indirect call into a
2306 direct call by constant propagating arguments or we are
2307 producing a dead clone (for further cloning). In all
2308 other cases we hit a bug (incorrect node sharing is the
2309 most common reason for missing edges). */
2310 gcc_assert (!dest->definition
2311 || dest->address_taken
2312 || !id->src_node->definition
2313 || !id->dst_node->definition);
2314 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2315 id->dst_node->create_edge_including_clones
2316 (dest, orig_stmt, call_stmt, bb->count,
2317 CIF_ORIGINALLY_INDIRECT_CALL);
2318 else
2319 id->dst_node->create_edge (dest, call_stmt,
2320 bb->count)->inline_failed
2321 = CIF_ORIGINALLY_INDIRECT_CALL;
2322 if (dump_file)
2324 fprintf (dump_file, "Created new direct edge to %s\n",
2325 dest->dump_name ());
2329 notice_special_calls (as_a <gcall *> (stmt));
2332 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2333 id->eh_map, id->eh_lp_nr);
2335 gsi_next (&copy_gsi);
2337 while (!gsi_end_p (copy_gsi));
2339 copy_gsi = gsi_last_bb (copy_basic_block);
2342 return copy_basic_block;
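/* Sketch of the profile scaling done by copy_bb (the numbers are
   illustrative): if the callee's entry count DEN is 1000 and the count
   NUM flowing into the inline copy is 100, then a source block with
   count 500 gets a copy with count 500 * 100 / 1000 = 50 via
   apply_scale (num, den).  */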
2345 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2346 form is quite easy, since the dominator relationship for the old basic
2347 blocks does not change.
2349 There is, however, an exception where inlining might change the dominator
2350 relation across EH edges from basic blocks within the inlined function
2351 to landing pads in the function we inline into.
2353 The function fills in PHI_RESULTs of such PHI nodes if they refer
2354 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2355 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2356 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2357 set, and this means that there will be no overlapping live ranges
2358 for the underlying symbol.
2360 This might change in the future if we allow redirecting of EH edges;
2361 we might then want to change the way we build the CFG pre-inlining to
2362 include all the possible edges. */
2363 static void
2364 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2365 bool can_throw, bool nonlocal_goto)
2367 edge e;
2368 edge_iterator ei;
2370 FOR_EACH_EDGE (e, ei, bb->succs)
2371 if (!e->dest->aux
2372 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2374 gphi *phi;
2375 gphi_iterator si;
2377 if (!nonlocal_goto)
2378 gcc_assert (e->flags & EDGE_EH);
2380 if (!can_throw)
2381 gcc_assert (!(e->flags & EDGE_EH));
2383 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2385 edge re;
2387 phi = si.phi ();
2389 /* For abnormal goto/call edges the receiver can be the
2390 ENTRY_BLOCK. Do not assert this cannot happen. */
2392 gcc_assert ((e->flags & EDGE_EH)
2393 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2395 re = find_edge (ret_bb, e->dest);
2396 gcc_checking_assert (re);
2397 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2398 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2400 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2401 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2406 /* Insert clobbers for automatic variables of inlined ID->src_fn
2407 function at the start of basic block ID->eh_landing_pad_dest. */
2409 static void
2410 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2412 tree var;
2413 basic_block bb = id->eh_landing_pad_dest;
2414 live_vars_map *vars = NULL;
2415 unsigned int cnt = 0;
2416 unsigned int i;
2417 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2418 if (VAR_P (var)
2419 && !DECL_HARD_REGISTER (var)
2420 && !TREE_THIS_VOLATILE (var)
2421 && !DECL_HAS_VALUE_EXPR_P (var)
2422 && !is_gimple_reg (var)
2423 && auto_var_in_fn_p (var, id->src_fn)
2424 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2426 tree *t = id->decl_map->get (var);
2427 if (!t)
2428 continue;
2429 tree new_var = *t;
2430 if (VAR_P (new_var)
2431 && !DECL_HARD_REGISTER (new_var)
2432 && !TREE_THIS_VOLATILE (new_var)
2433 && !DECL_HAS_VALUE_EXPR_P (new_var)
2434 && !is_gimple_reg (new_var)
2435 && auto_var_in_fn_p (new_var, id->dst_fn))
2437 if (vars == NULL)
2438 vars = new live_vars_map;
2439 vars->put (DECL_UID (var), cnt++);
2442 if (vars == NULL)
2443 return;
2445 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2446 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2447 if (VAR_P (var))
2449 edge e;
2450 edge_iterator ei;
2451 bool needed = false;
2452 unsigned int *v = vars->get (DECL_UID (var));
2453 if (v == NULL)
2454 continue;
2455 FOR_EACH_EDGE (e, ei, bb->preds)
2456 if ((e->flags & EDGE_EH) != 0
2457 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2459 basic_block src_bb = (basic_block) e->src->aux;
2461 if (bitmap_bit_p (&live[src_bb->index], *v))
2463 needed = true;
2464 break;
2467 if (needed)
2469 tree new_var = *id->decl_map->get (var);
2470 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2471 tree clobber = build_clobber (TREE_TYPE (new_var));
2472 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2473 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2476 destroy_live_vars (live);
2477 delete vars;
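/* The clobbers inserted above take the usual end-of-scope form,
   e.g. (illustrative)

     tmp_var ={v} {CLOBBER};

   placed after the labels of the EH landing pad, so that automatic
   variables of the inlined body are considered dead when an exception
   escapes the inlined region.  */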
2480 /* Copy edges from BB into its copy constructed earlier, scale the profile
2481 accordingly. Edges will be taken care of later. Assume the aux
2482 pointers point to the copies of each BB. Return true if any
2483 debug stmts are left after a statement that must end the basic block. */
2485 static bool
2486 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2487 basic_block ret_bb, basic_block abnormal_goto_dest,
2488 copy_body_data *id)
2490 basic_block new_bb = (basic_block) bb->aux;
2491 edge_iterator ei;
2492 edge old_edge;
2493 gimple_stmt_iterator si;
2494 bool need_debug_cleanup = false;
2496 /* Use the indices from the original blocks to create edges for the
2497 new ones. */
2498 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2499 if (!(old_edge->flags & EDGE_EH))
2501 edge new_edge;
2502 int flags = old_edge->flags;
2503 location_t locus = old_edge->goto_locus;
2505 /* Return edges do get a FALLTHRU flag when they get inlined. */
2506 if (old_edge->dest->index == EXIT_BLOCK
2507 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2508 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2509 flags |= EDGE_FALLTHRU;
2511 new_edge
2512 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2513 new_edge->probability = old_edge->probability;
2514 if (!id->reset_location)
2515 new_edge->goto_locus = remap_location (locus, id);
2518 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2519 return false;
2521 /* When doing function splitting, we must decrease the count of the return
2522 block which was previously reachable from blocks we did not copy. */
2523 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2524 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2525 if (old_edge->src->index != ENTRY_BLOCK
2526 && !old_edge->src->aux)
2527 new_bb->count -= old_edge->count ().apply_scale (num, den);
2529 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2531 gimple *copy_stmt;
2532 bool can_throw, nonlocal_goto;
2534 copy_stmt = gsi_stmt (si);
2535 if (!is_gimple_debug (copy_stmt))
2536 update_stmt (copy_stmt);
2538 /* Do this before the possible split_block. */
2539 gsi_next (&si);
2541 /* If this tree could throw an exception, there are two
2542 cases where we need to add abnormal edge(s): the
2543 tree wasn't in a region and there is a "current
2544 region" in the caller; or the original tree had
2545 EH edges. In both cases split the block after the tree,
2546 and add abnormal edge(s) as needed; we need both
2547 those from the callee and the caller.
2548 We check whether the copy can throw, because the const
2549 propagation can change an INDIRECT_REF which throws
2550 into a COMPONENT_REF which doesn't. If the copy
2551 can throw, the original could also throw. */
2552 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2553 nonlocal_goto
2554 = (stmt_can_make_abnormal_goto (copy_stmt)
2555 && !computed_goto_p (copy_stmt));
2557 if (can_throw || nonlocal_goto)
2559 if (!gsi_end_p (si))
2561 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2562 gsi_next (&si);
2563 if (gsi_end_p (si))
2564 need_debug_cleanup = true;
2566 if (!gsi_end_p (si))
2567 /* Note that bb's predecessor edges aren't necessarily
2568 right at this point; split_block doesn't care. */
2570 edge e = split_block (new_bb, copy_stmt);
2572 new_bb = e->dest;
2573 new_bb->aux = e->src->aux;
2574 si = gsi_start_bb (new_bb);
2578 bool update_probs = false;
2580 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2582 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2583 update_probs = true;
2585 else if (can_throw)
2587 make_eh_edges (copy_stmt);
2588 update_probs = true;
2591 /* EH edges may not match old edges. Copy as much as possible. */
2592 if (update_probs)
2594 edge e;
2595 edge_iterator ei;
2596 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2598 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2599 if ((old_edge->flags & EDGE_EH)
2600 && (e = find_edge (copy_stmt_bb,
2601 (basic_block) old_edge->dest->aux))
2602 && (e->flags & EDGE_EH))
2603 e->probability = old_edge->probability;
2605 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2606 if (e->flags & EDGE_EH)
2608 if (!e->probability.initialized_p ())
2609 e->probability = profile_probability::never ();
2610 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2612 if (id->eh_landing_pad_dest == NULL)
2613 id->eh_landing_pad_dest = e->dest;
2614 else
2615 gcc_assert (id->eh_landing_pad_dest == e->dest);
2621 /* If the call we inline cannot make an abnormal goto, do not add
2622 additional abnormal edges but only retain those already present
2623 in the original function body. */
2624 if (abnormal_goto_dest == NULL)
2625 nonlocal_goto = false;
2626 if (nonlocal_goto)
2628 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2630 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2631 nonlocal_goto = false;
2632 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2633 in OpenMP regions which aren't allowed to be left abnormally.
2634 So, no need to add abnormal edge in that case. */
2635 else if (is_gimple_call (copy_stmt)
2636 && gimple_call_internal_p (copy_stmt)
2637 && (gimple_call_internal_fn (copy_stmt)
2638 == IFN_ABNORMAL_DISPATCHER)
2639 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2640 nonlocal_goto = false;
2641 else
2642 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2643 EDGE_ABNORMAL);
2646 if ((can_throw || nonlocal_goto)
2647 && gimple_in_ssa_p (cfun))
2648 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2649 can_throw, nonlocal_goto);
2651 return need_debug_cleanup;
2654 /* Copy the PHIs. All blocks and edges are copied, some blocks
2655 were possibly split and new outgoing EH edges inserted.
2656 BB points to the block of the original function and AUX pointers link
2657 the original and newly copied blocks. */
2659 static void
2660 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2662 basic_block const new_bb = (basic_block) bb->aux;
2663 edge_iterator ei;
2664 gphi *phi;
2665 gphi_iterator si;
2666 edge new_edge;
2667 bool inserted = false;
2669 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2671 tree res, new_res;
2672 gphi *new_phi;
2674 phi = si.phi ();
2675 res = PHI_RESULT (phi);
2676 new_res = res;
2677 if (!virtual_operand_p (res))
2679 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2680 if (EDGE_COUNT (new_bb->preds) == 0)
2682 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2683 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2685 else
2687 new_phi = create_phi_node (new_res, new_bb);
2688 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2690 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2691 bb);
2692 tree arg;
2693 tree new_arg;
2694 edge_iterator ei2;
2695 location_t locus;
2697 /* When doing partial cloning, we allow PHIs on the entry
2698 block as long as all the arguments are the same.
2699 Use any input edge to find the argument to copy. */
2700 if (!old_edge)
2701 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2702 if (!old_edge->src->aux)
2703 break;
2705 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2706 new_arg = arg;
2707 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2708 gcc_assert (new_arg);
2709 /* With return slot optimization we can end up with
2710 non-gimple (foo *)&this->m, fix that here. */
2711 if (TREE_CODE (new_arg) != SSA_NAME
2712 && TREE_CODE (new_arg) != FUNCTION_DECL
2713 && !is_gimple_val (new_arg))
2715 gimple_seq stmts = NULL;
2716 new_arg = force_gimple_operand (new_arg, &stmts, true,
2717 NULL);
2718 gsi_insert_seq_on_edge (new_edge, stmts);
2719 inserted = true;
2721 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2722 if (id->reset_location)
2723 locus = input_location;
2724 else
2725 locus = remap_location (locus, id);
2726 add_phi_arg (new_phi, new_arg, new_edge, locus);
2732 /* Commit the delayed edge insertions. */
2733 if (inserted)
2734 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2735 gsi_commit_one_edge_insert (new_edge, NULL);
2739 /* Wrapper for remap_decl so it can be used as a callback. */
2741 static tree
2742 remap_decl_1 (tree decl, void *data)
2744 return remap_decl (decl, (copy_body_data *) data);
2747 /* Build struct function and associated datastructures for the new clone
2748 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2749 cfun to the function of NEW_FNDECL (and current_function_decl too). */
2751 static void
2752 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2754 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2756 if (!DECL_ARGUMENTS (new_fndecl))
2757 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2758 if (!DECL_RESULT (new_fndecl))
2759 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2761 /* Register specific tree functions. */
2762 gimple_register_cfg_hooks ();
2764 /* Get clean struct function. */
2765 push_struct_function (new_fndecl);
2767 /* We will rebuild these, so just sanity check that they are empty. */
2768 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2769 gcc_assert (cfun->local_decls == NULL);
2770 gcc_assert (cfun->cfg == NULL);
2771 gcc_assert (cfun->decl == new_fndecl);
2773 /* Copy items we preserve during cloning. */
2774 cfun->static_chain_decl = src_cfun->static_chain_decl;
2775 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2776 cfun->function_end_locus = src_cfun->function_end_locus;
2777 cfun->curr_properties = src_cfun->curr_properties;
2778 cfun->last_verified = src_cfun->last_verified;
2779 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2780 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2781 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2782 cfun->calls_eh_return = src_cfun->calls_eh_return;
2783 cfun->stdarg = src_cfun->stdarg;
2784 cfun->after_inlining = src_cfun->after_inlining;
2785 cfun->can_throw_non_call_exceptions
2786 = src_cfun->can_throw_non_call_exceptions;
2787 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2788 cfun->returns_struct = src_cfun->returns_struct;
2789 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2791 init_empty_tree_cfg ();
2793 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2795 profile_count num = count;
2796 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2797 profile_count::adjust_for_ipa_scaling (&num, &den);
2799 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2800 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2801 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2802 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2803 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2804 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2805 if (src_cfun->eh)
2806 init_eh_for_function ();
2808 if (src_cfun->gimple_df)
2810 init_tree_ssa (cfun);
2811 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2812 if (cfun->gimple_df->in_ssa_p)
2813 init_ssa_operands (cfun);
2817 /* Helper function for copy_cfg_body. Move debug stmts from the end
2818 of NEW_BB to the beginning of successor basic blocks when needed. If the
2819 successor has multiple predecessors, reset them; otherwise keep
2820 their value. */
2822 static void
2823 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2825 edge e;
2826 edge_iterator ei;
2827 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2829 if (gsi_end_p (si)
2830 || gsi_one_before_end_p (si)
2831 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2832 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2833 return;
2835 FOR_EACH_EDGE (e, ei, new_bb->succs)
2837 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2838 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2839 while (is_gimple_debug (gsi_stmt (ssi)))
2841 gimple *stmt = gsi_stmt (ssi);
2842 gdebug *new_stmt;
2843 tree var;
2844 tree value;
2846 /* For the last edge move the debug stmts instead of copying
2847 them. */
2848 if (ei_one_before_end_p (ei))
2850 si = ssi;
2851 gsi_prev (&ssi);
2852 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2854 gimple_debug_bind_reset_value (stmt);
2855 gimple_set_location (stmt, UNKNOWN_LOCATION);
2857 gsi_remove (&si, false);
2858 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2859 continue;
2862 if (gimple_debug_bind_p (stmt))
2864 var = gimple_debug_bind_get_var (stmt);
2865 if (single_pred_p (e->dest))
2867 value = gimple_debug_bind_get_value (stmt);
2868 value = unshare_expr (value);
2869 new_stmt = gimple_build_debug_bind (var, value, stmt);
2871 else
2872 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2874 else if (gimple_debug_source_bind_p (stmt))
2876 var = gimple_debug_source_bind_get_var (stmt);
2877 value = gimple_debug_source_bind_get_value (stmt);
2878 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2880 else if (gimple_debug_nonbind_marker_p (stmt))
2881 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2882 else
2883 gcc_unreachable ();
2884 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2885 id->debug_stmts.safe_push (new_stmt);
2886 gsi_prev (&ssi);
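/* A rough picture of the above (names are illustrative): when a copied
   block ends with a statement that may throw or make an abnormal goto,
   followed only by debug stmts, e.g.

     foo_3 = bar (x_1);
     # DEBUG y => foo_3

   the trailing debug binds are moved (for the last edge) or copied to
   the successor blocks; on a successor with several predecessors the
   bound value is reset, leaving "# DEBUG y => NULL", since the value
   may differ between incoming paths.  */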
2891 /* Make a copy of the sub-loops of SRC_PARENT and place them
2892 as siblings of DEST_PARENT. */
2894 static void
2895 copy_loops (copy_body_data *id,
2896 class loop *dest_parent, class loop *src_parent)
2898 class loop *src_loop = src_parent->inner;
2899 while (src_loop)
2901 if (!id->blocks_to_copy
2902 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2904 class loop *dest_loop = alloc_loop ();
2906 /* Assign the new loop its header and latch and associate
2907 those with the new loop. */
2908 dest_loop->header = (basic_block)src_loop->header->aux;
2909 dest_loop->header->loop_father = dest_loop;
2910 if (src_loop->latch != NULL)
2912 dest_loop->latch = (basic_block)src_loop->latch->aux;
2913 dest_loop->latch->loop_father = dest_loop;
2916 /* Copy loop meta-data. */
2917 copy_loop_info (src_loop, dest_loop);
2918 if (dest_loop->unroll)
2919 cfun->has_unroll = true;
2920 if (dest_loop->force_vectorize)
2921 cfun->has_force_vectorize_loops = true;
2922 if (id->src_cfun->last_clique != 0)
2923 dest_loop->owned_clique
2924 = remap_dependence_clique (id,
2925 src_loop->owned_clique
2926 ? src_loop->owned_clique : 1);
2928 /* Finally place it into the loop array and the loop tree. */
2929 place_new_loop (cfun, dest_loop);
2930 flow_loop_tree_node_add (dest_parent, dest_loop);
2932 if (src_loop->simduid)
2934 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2935 cfun->has_simduid_loops = true;
2938 /* Recurse. */
2939 copy_loops (id, dest_loop, src_loop);
2941 src_loop = src_loop->next;
2945 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2947 void
2948 redirect_all_calls (copy_body_data * id, basic_block bb)
2950 gimple_stmt_iterator si;
2951 gimple *last = last_stmt (bb);
2952 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2954 gimple *stmt = gsi_stmt (si);
2955 if (is_gimple_call (stmt))
2957 tree old_lhs = gimple_call_lhs (stmt);
2958 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2959 if (edge)
2961 gimple *new_stmt
2962 = cgraph_edge::redirect_call_stmt_to_callee (edge);
2963 /* If the IPA-SRA transformation, run as part of edge redirection,
2964 removed the LHS because it is unused, save it to
2965 killed_new_ssa_names so that we can prune it from debug
2966 statements. */
2967 if (old_lhs
2968 && TREE_CODE (old_lhs) == SSA_NAME
2969 && !gimple_call_lhs (new_stmt))
2971 if (!id->killed_new_ssa_names)
2972 id->killed_new_ssa_names = new hash_set<tree> (16);
2973 id->killed_new_ssa_names->add (old_lhs);
2976 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2977 gimple_purge_dead_eh_edges (bb);
2983 /* Make a copy of the body of FN so that it can be inserted inline in
2984 another function. Walks FN via CFG, returns new fndecl. */
2986 static tree
2987 copy_cfg_body (copy_body_data * id,
2988 basic_block entry_block_map, basic_block exit_block_map,
2989 basic_block new_entry)
2991 tree callee_fndecl = id->src_fn;
2992 /* Original cfun for the callee, doesn't change. */
2993 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2994 struct function *cfun_to_copy;
2995 basic_block bb;
2996 tree new_fndecl = NULL;
2997 bool need_debug_cleanup = false;
2998 int last;
2999 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
3000 profile_count num = entry_block_map->count;
3002 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3004 /* Register specific tree functions. */
3005 gimple_register_cfg_hooks ();
3007 /* If we are inlining just a region of the function, make sure to connect
3008 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
3009 part of a loop, we must compute the frequency and probability of
3010 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
3011 probabilities of edges incoming from the nonduplicated region. */
3012 if (new_entry)
3014 edge e;
3015 edge_iterator ei;
3016 den = profile_count::zero ();
3018 FOR_EACH_EDGE (e, ei, new_entry->preds)
3019 if (!e->src->aux)
3020 den += e->count ();
3021 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
3024 profile_count::adjust_for_ipa_scaling (&num, &den);
3026 /* Must have a CFG here at this point. */
3027 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
3028 (DECL_STRUCT_FUNCTION (callee_fndecl)));
3031 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
3032 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3033 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3034 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3036 /* Duplicate any exception-handling regions. */
3037 if (cfun->eh)
3038 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3039 remap_decl_1, id);
3041 /* Use aux pointers to map the original blocks to copy. */
3042 FOR_EACH_BB_FN (bb, cfun_to_copy)
3043 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3045 basic_block new_bb = copy_bb (id, bb, num, den);
3046 bb->aux = new_bb;
3047 new_bb->aux = bb;
3048 new_bb->loop_father = entry_block_map->loop_father;
3051 last = last_basic_block_for_fn (cfun);
3053 /* Now that we've duplicated the blocks, duplicate their edges. */
3054 basic_block abnormal_goto_dest = NULL;
3055 if (id->call_stmt
3056 && stmt_can_make_abnormal_goto (id->call_stmt))
3058 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3060 bb = gimple_bb (id->call_stmt);
3061 gsi_next (&gsi);
3062 if (gsi_end_p (gsi))
3063 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3065 FOR_ALL_BB_FN (bb, cfun_to_copy)
3066 if (!id->blocks_to_copy
3067 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3068 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3069 abnormal_goto_dest, id);
3071 if (id->eh_landing_pad_dest)
3073 add_clobbers_to_eh_landing_pad (id);
3074 id->eh_landing_pad_dest = NULL;
3077 if (new_entry)
3079 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3080 EDGE_FALLTHRU);
3081 e->probability = profile_probability::always ();
3084 /* Duplicate the loop tree, if available and wanted. */
3085 if (loops_for_fn (src_cfun) != NULL
3086 && current_loops != NULL)
3088 copy_loops (id, entry_block_map->loop_father,
3089 get_loop (src_cfun, 0));
3090 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
3091 loops_state_set (LOOPS_NEED_FIXUP);
3094 /* If the loop tree in the source function needed fixup, mark the
3095 destination loop tree for fixup, too. */
3096 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3097 loops_state_set (LOOPS_NEED_FIXUP);
3099 if (gimple_in_ssa_p (cfun))
3100 FOR_ALL_BB_FN (bb, cfun_to_copy)
3101 if (!id->blocks_to_copy
3102 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3103 copy_phis_for_bb (bb, id);
3105 FOR_ALL_BB_FN (bb, cfun_to_copy)
3106 if (bb->aux)
3108 if (need_debug_cleanup
3109 && bb->index != ENTRY_BLOCK
3110 && bb->index != EXIT_BLOCK)
3111 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3112 /* Update call edge destinations. This cannot be done before loop
3113 info is updated, because we may split basic blocks. */
3114 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3115 && bb->index != ENTRY_BLOCK
3116 && bb->index != EXIT_BLOCK)
3117 redirect_all_calls (id, (basic_block)bb->aux);
3118 ((basic_block)bb->aux)->aux = NULL;
3119 bb->aux = NULL;
3122 /* Zero out AUX fields of blocks newly created during EH edge
3123 insertion. */
3124 for (; last < last_basic_block_for_fn (cfun); last++)
3126 if (need_debug_cleanup)
3127 maybe_move_debug_stmts_to_successors (id,
3128 BASIC_BLOCK_FOR_FN (cfun, last));
3129 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3130 /* Update call edge destinations. This cannot be done before loop
3131 info is updated, because we may split basic blocks. */
3132 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3133 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3135 entry_block_map->aux = NULL;
3136 exit_block_map->aux = NULL;
3138 if (id->eh_map)
3140 delete id->eh_map;
3141 id->eh_map = NULL;
3143 if (id->dependence_map)
3145 delete id->dependence_map;
3146 id->dependence_map = NULL;
3149 return new_fndecl;
3152 /* Copy the debug STMT using ID. We deal with these statements in a
3153 special way: if any variable in their VALUE expression wasn't
3154 remapped yet, we won't remap it, because that would get decl uids
3155 out of sync, causing codegen differences between -g and -g0. If
3156 this arises, we drop the VALUE expression altogether. */
3158 static void
3159 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3161 tree t, *n;
3162 struct walk_stmt_info wi;
3164 if (tree block = gimple_block (stmt))
3166 n = id->decl_map->get (block);
3167 gimple_set_block (stmt, n ? *n : id->block);
3170 if (gimple_debug_nonbind_marker_p (stmt))
3172 if (id->call_stmt && !gimple_block (stmt))
3174 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3175 gsi_remove (&gsi, true);
3177 return;
3180 /* Remap all the operands in COPY. */
3181 memset (&wi, 0, sizeof (wi));
3182 wi.info = id;
3184 processing_debug_stmt = 1;
3186 if (gimple_debug_source_bind_p (stmt))
3187 t = gimple_debug_source_bind_get_var (stmt);
3188 else if (gimple_debug_bind_p (stmt))
3189 t = gimple_debug_bind_get_var (stmt);
3190 else
3191 gcc_unreachable ();
3193 if (TREE_CODE (t) == PARM_DECL && id->debug_map
3194 && (n = id->debug_map->get (t)))
3196 gcc_assert (VAR_P (*n));
3197 t = *n;
3199 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3200 /* T is a non-localized variable. */;
3201 else
3202 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3204 if (gimple_debug_bind_p (stmt))
3206 gimple_debug_bind_set_var (stmt, t);
3208 if (gimple_debug_bind_has_value_p (stmt))
3209 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3210 remap_gimple_op_r, &wi, NULL);
3212 /* Punt if any decl couldn't be remapped. */
3213 if (processing_debug_stmt < 0)
3214 gimple_debug_bind_reset_value (stmt);
3216 else if (gimple_debug_source_bind_p (stmt))
3218 gimple_debug_source_bind_set_var (stmt, t);
3219 /* When inlining and the source bind refers to one of the optimized
3220 away parameters, change the source bind into a normal debug bind
3221 referring to the corresponding DEBUG_EXPR_DECL that should have
3222 been bound before the call stmt. */
3223 t = gimple_debug_source_bind_get_value (stmt);
3224 if (t != NULL_TREE
3225 && TREE_CODE (t) == PARM_DECL
3226 && id->call_stmt)
3228 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3229 unsigned int i;
3230 if (debug_args != NULL)
3232 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3233 if ((**debug_args)[i] == DECL_ORIGIN (t)
3234 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3236 t = (**debug_args)[i + 1];
3237 stmt->subcode = GIMPLE_DEBUG_BIND;
3238 gimple_debug_bind_set_value (stmt, t);
3239 break;
3243 if (gimple_debug_source_bind_p (stmt))
3244 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3245 remap_gimple_op_r, &wi, NULL);
3248 processing_debug_stmt = 0;
3250 update_stmt (stmt);
3253 /* Process deferred debug stmts. In order to give values better odds
3254 of being successfully remapped, we delay the processing of debug
3255 stmts until all other stmts that might require remapping are
3256 processed. */
3258 static void
3259 copy_debug_stmts (copy_body_data *id)
3261 size_t i;
3262 gdebug *stmt;
3264 if (!id->debug_stmts.exists ())
3265 return;
3267 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3268 copy_debug_stmt (stmt, id);
3270 id->debug_stmts.release ();
3273 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3274 another function. */
3276 static tree
3277 copy_tree_body (copy_body_data *id)
3279 tree fndecl = id->src_fn;
3280 tree body = DECL_SAVED_TREE (fndecl);
3282 walk_tree (&body, copy_tree_body_r, id, NULL);
3284 return body;
3287 /* Make a copy of the body of FN so that it can be inserted inline in
3288 another function. */
3290 static tree
3291 copy_body (copy_body_data *id,
3292 basic_block entry_block_map, basic_block exit_block_map,
3293 basic_block new_entry)
3295 tree fndecl = id->src_fn;
3296 tree body;
3298 /* If this body has a CFG, walk CFG and copy. */
3299 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3300 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3301 new_entry);
3302 copy_debug_stmts (id);
3303 delete id->killed_new_ssa_names;
3304 id->killed_new_ssa_names = NULL;
3306 return body;
3309 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3310 defined in function FN, or of a data member thereof. */
3312 static bool
3313 self_inlining_addr_expr (tree value, tree fn)
3315 tree var;
3317 if (TREE_CODE (value) != ADDR_EXPR)
3318 return false;
3320 var = get_base_address (TREE_OPERAND (value, 0));
3322 return var && auto_var_in_fn_p (var, fn);
3325 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3326 lexical block and line number information from base_stmt, if given,
3327 or from the last stmt of the block otherwise. */
3329 static gimple *
3330 insert_init_debug_bind (copy_body_data *id,
3331 basic_block bb, tree var, tree value,
3332 gimple *base_stmt)
3334 gimple *note;
3335 gimple_stmt_iterator gsi;
3336 tree tracked_var;
3338 if (!gimple_in_ssa_p (id->src_cfun))
3339 return NULL;
3341 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3342 return NULL;
3344 tracked_var = target_for_debug_bind (var);
3345 if (!tracked_var)
3346 return NULL;
3348 if (bb)
3350 gsi = gsi_last_bb (bb);
3351 if (!base_stmt && !gsi_end_p (gsi))
3352 base_stmt = gsi_stmt (gsi);
3355 note = gimple_build_debug_bind (tracked_var,
3356 value == error_mark_node
3357 ? NULL_TREE : unshare_expr (value),
3358 base_stmt);
3360 if (bb)
3362 if (!gsi_end_p (gsi))
3363 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3364 else
3365 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3368 return note;
3371 static void
3372 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3374 /* If VAR represents a zero-sized variable, it's possible that the
3375 assignment statement may result in no gimple statements. */
3376 if (init_stmt)
3378 gimple_stmt_iterator si = gsi_last_bb (bb);
3380 /* We can end up with init statements that store to a non-register
3381 from a rhs with a conversion. Handle that here by forcing the
3382 rhs into a temporary. gimple_regimplify_operands is not
3383 prepared to do this for us. */
3384 if (!is_gimple_debug (init_stmt)
3385 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3386 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3387 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3389 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3390 gimple_expr_type (init_stmt),
3391 gimple_assign_rhs1 (init_stmt));
3392 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3393 GSI_NEW_STMT);
3394 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3395 gimple_assign_set_rhs1 (init_stmt, rhs);
3397 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3398 if (!is_gimple_debug (init_stmt))
3400 gimple_regimplify_operands (init_stmt, &si);
3402 tree def = gimple_assign_lhs (init_stmt);
3403 insert_init_debug_bind (id, bb, def, def, init_stmt);
3408 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3409 if need be (which should only be necessary for invalid programs). Attempt
3410 to convert VALUE to TYPE and return the result if it is possible; just return
3411 a zero constant of the given type if it fails. */
3413 tree
3414 force_value_to_type (tree type, tree value)
3416 /* If we can match up types by promotion/demotion do so. */
3417 if (fold_convertible_p (type, value))
3418 return fold_convert (type, value);
3420 /* ??? For valid programs we should not end up here.
3421 Still if we end up with truly mismatched types here, fall back
3422 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3423 GIMPLE to the following passes. */
3424 if (TREE_CODE (value) == WITH_SIZE_EXPR)
3425 return error_mark_node;
3426 else if (!is_gimple_reg_type (TREE_TYPE (value))
3427 || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3428 return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3429 else
3430 return build_zero_cst (type);
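/* Hedged examples of the paths above: an int constant passed for a
   long parameter is handled by fold_convert; an aggregate value (or a
   register value whose size matches TYPE) is reinterpreted with
   VIEW_CONVERT_EXPR; a register value of a different size falls back
   to build_zero_cst (type).  */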
3433 /* Initialize parameter P with VALUE. If needed, produce an init statement
3434 at the end of BB. When BB is NULL, we return the init statement to be
3435 output later. */
3436 static gimple *
3437 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3438 basic_block bb, tree *vars)
3440 gimple *init_stmt = NULL;
3441 tree var;
3442 tree def = (gimple_in_ssa_p (cfun)
3443 ? ssa_default_def (id->src_cfun, p) : NULL);
3445 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3446 here since the type of this decl must be visible to the calling
3447 function. */
3448 var = copy_decl_to_var (p, id);
3450 /* Declare this new variable. */
3451 DECL_CHAIN (var) = *vars;
3452 *vars = var;
3454 /* Make gimplifier happy about this variable. */
3455 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3457 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3458 we would not need to create a new variable here at all, if it
3459 weren't for debug info. Still, we can just use the argument
3460 value. */
3461 if (TREE_READONLY (p)
3462 && !TREE_ADDRESSABLE (p)
3463 && value && !TREE_SIDE_EFFECTS (value)
3464 && !def)
3466 /* We may produce non-gimple trees by adding NOPs or introduce
3467 invalid sharing when the operand is not really constant.
3468 It is not a big deal to prohibit constant propagation here as
3469 we will constant propagate in the DOM1 pass anyway. */
3470 if (is_gimple_min_invariant (value)
3471 && useless_type_conversion_p (TREE_TYPE (p),
3472 TREE_TYPE (value))
3473 /* We have to be very careful about ADDR_EXPR. Make sure
3474 the base variable isn't a local variable of the inlined
3475 function, e.g., when doing recursive inlining, direct or
3476 mutually-recursive or whatever, which is why we don't
3477 just test whether fn == current_function_decl. */
3478 && ! self_inlining_addr_expr (value, fn))
3480 insert_decl_map (id, p, value);
3481 insert_debug_decl_map (id, p, var);
3482 return insert_init_debug_bind (id, bb, var, value, NULL);
3486 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3487 that way, when the PARM_DECL is encountered, it will be
3488 automatically replaced by the VAR_DECL. */
3489 insert_decl_map (id, p, var);
3491 /* Even if P was TREE_READONLY, the new VAR should not be.
3492 In the original code, we would have constructed a
3493 temporary, and then the function body would have never
3494 changed the value of P. However, now, we will be
3495 constructing VAR directly. The constructor body may
3496 change its value multiple times as it is being
3497 constructed. Therefore, it must not be TREE_READONLY;
3498 the back-end assumes that a TREE_READONLY variable is
3499 assigned to only once. */
3500 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3501 TREE_READONLY (var) = 0;
3503 tree rhs = value;
3504 if (value
3505 && value != error_mark_node
3506 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3507 rhs = force_value_to_type (TREE_TYPE (p), value);
3509 /* If there is no setup required and we are in SSA, take the easy route
3510 replacing all SSA names representing the function parameter by the
3511 SSA name passed to the function.
3513 We need to construct a map for the variable anyway as it might be used
3514 in different SSA names when the parameter is set in the function.
3516 Do the replacement at -O0 for const arguments replaced by a constant.
3517 This is important for builtin_constant_p and other constructs requiring
3518 the constant argument to be visible in the inlined function body. */
3519 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3520 && (optimize
3521 || (TREE_READONLY (p)
3522 && is_gimple_min_invariant (rhs)))
3523 && (TREE_CODE (rhs) == SSA_NAME
3524 || is_gimple_min_invariant (rhs))
3525 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3527 insert_decl_map (id, def, rhs);
3528 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3531 /* If the value of the argument is never used, don't care about initializing
3532 it. */
3533 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3535 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3536 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3539 /* Initialize this VAR_DECL from the equivalent argument. Convert
3540 the argument to the proper type in case it was promoted. */
3541 if (value)
3543 if (rhs == error_mark_node)
3545 insert_decl_map (id, p, var);
3546 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3549 STRIP_USELESS_TYPE_CONVERSION (rhs);
3551 /* If we are in SSA form properly remap the default definition
3552 or assign to a dummy SSA name if the parameter is unused and
3553 we are not optimizing. */
3554 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3556 if (def)
3558 def = remap_ssa_name (def, id);
3559 init_stmt = gimple_build_assign (def, rhs);
3560 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3561 set_ssa_default_def (cfun, var, NULL);
3563 else if (!optimize)
3565 def = make_ssa_name (var);
3566 init_stmt = gimple_build_assign (def, rhs);
3569 else
3570 init_stmt = gimple_build_assign (var, rhs);
3572 if (bb && init_stmt)
3573 insert_init_stmt (id, bb, init_stmt);
3575 return init_stmt;
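/* Illustrative sketch of the common cases above, for inlining
   int f (int x) at the call f (5) (SSA names are made up):
   - in SSA form with optimization and an invariant argument, the
     default definition of x is simply mapped to 5, so no
     initialization statement is emitted;
   - otherwise a local copy is created and initialized explicitly,
     roughly "x_2 = 5;" (or "x = 5;" for a non-register), optionally
     followed by a debug bind when -fvar-tracking-assignments is
     active.  */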
3578 /* Generate code to initialize the parameters of the function at the
3579 top of the stack in ID from the GIMPLE_CALL STMT. */
3581 static void
3582 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3583 tree fn, basic_block bb)
3585 tree parms;
3586 size_t i;
3587 tree p;
3588 tree vars = NULL_TREE;
3589 tree static_chain = gimple_call_chain (stmt);
3591 /* Figure out what the parameters are. */
3592 parms = DECL_ARGUMENTS (fn);
3594 /* Loop through the parameter declarations, replacing each with an
3595 equivalent VAR_DECL, appropriately initialized. */
3596 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3598 tree val;
3599 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3600 setup_one_parameter (id, p, val, fn, bb, &vars);
3602 /* After remapping parameters remap their types. This has to be done
3603 in a second loop over all parameters to appropriately remap
3604 variable sized arrays when the size is specified in a
3605 parameter following the array. */
3606 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3608 tree *varp = id->decl_map->get (p);
3609 if (varp && VAR_P (*varp))
3611 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3612 ? ssa_default_def (id->src_cfun, p) : NULL);
3613 tree var = *varp;
3614 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3615 /* Also remap the default definition if it was remapped
3616 to the default definition of the parameter replacement
3617 by the parameter setup. */
3618 if (def)
3620 tree *defp = id->decl_map->get (def);
3621 if (defp
3622 && TREE_CODE (*defp) == SSA_NAME
3623 && SSA_NAME_VAR (*defp) == var)
3624 TREE_TYPE (*defp) = TREE_TYPE (var);
3629 /* Initialize the static chain. */
3630 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3631 gcc_assert (fn != current_function_decl);
3632 if (p)
3634 /* No static chain? Seems like a bug in tree-nested.c. */
3635 gcc_assert (static_chain);
3637 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3640 declare_inline_vars (id->block, vars);
3644 /* Declare a return variable to replace the RESULT_DECL for the
3645 function we are calling. An appropriate DECL_STMT is returned.
3646 The USE_STMT is filled to contain a use of the declaration to
3647 indicate the return value of the function.
3649 RETURN_SLOT, if non-null, is the place where the result is stored. It
3650 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3651 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3653 The return value is a (possibly null) value that holds the result
3654 as seen by the caller. */
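/* For example (illustrative): for "s = foo ();" MODIFY_DEST is s and
   may be reused directly when foo cannot observe it; for a call whose
   result is constructed in place, RETURN_SLOT names the caller's
   object and the inlined RESULT_DECL is mapped to it (or to &slot
   when the result is returned by reference).  */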
3656 static tree
3657 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3658 basic_block entry_bb)
3660 tree callee = id->src_fn;
3661 tree result = DECL_RESULT (callee);
3662 tree callee_type = TREE_TYPE (result);
3663 tree caller_type;
3664 tree var, use;
3666 /* Handle type-mismatches in the function declaration return type
3667 vs. the call expression. */
3668 if (modify_dest)
3669 caller_type = TREE_TYPE (modify_dest);
3670 else if (return_slot)
3671 caller_type = TREE_TYPE (return_slot);
3672 else /* No LHS on the call. */
3673 caller_type = TREE_TYPE (TREE_TYPE (callee));
3675 /* We don't need to do anything for functions that don't return anything. */
3676 if (VOID_TYPE_P (callee_type))
3677 return NULL_TREE;
3679 /* If there was a return slot, then the return value is the
3680 dereferenced address of that object. */
3681 if (return_slot)
3683 /* The front end shouldn't have used both return_slot and
3684 a modify expression. */
3685 gcc_assert (!modify_dest);
3686 if (DECL_BY_REFERENCE (result))
3688 tree return_slot_addr = build_fold_addr_expr (return_slot);
3689 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3691 /* We are going to construct *&return_slot and we can't do that
3692 for variables that are believed to be non-addressable.
3694 FIXME: This check can possibly trigger, because values returned
3695 via the return slot optimization are not considered to have their
3696 address taken by alias analysis. */
3697 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3698 var = return_slot_addr;
3699 mark_addressable (return_slot);
3701 else
3703 var = return_slot;
3704 gcc_assert (TREE_CODE (var) != SSA_NAME);
3705 if (TREE_ADDRESSABLE (result))
3706 mark_addressable (var);
3708 if (DECL_NOT_GIMPLE_REG_P (result)
3709 && DECL_P (var))
3710 DECL_NOT_GIMPLE_REG_P (var) = 1;
3712 if (!useless_type_conversion_p (callee_type, caller_type))
3713 var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3715 use = NULL;
3716 goto done;
3719 /* All types requiring non-trivial constructors should have been handled. */
3720 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3722 /* Attempt to avoid creating a new temporary variable. */
3723 if (modify_dest
3724 && TREE_CODE (modify_dest) != SSA_NAME)
3726 bool use_it = false;
3728 /* We can't use MODIFY_DEST if there's type promotion involved. */
3729 if (!useless_type_conversion_p (callee_type, caller_type))
3730 use_it = false;
3732 /* ??? If we're assigning to a variable sized type, then we must
3733 reuse the destination variable, because we've no good way to
3734 create variable sized temporaries at this point. */
3735 else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3736 use_it = true;
3738 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3739 reuse it as the result of the call directly. Don't do this if
3740 it would promote MODIFY_DEST to addressable. */
3741 else if (TREE_ADDRESSABLE (result))
3742 use_it = false;
3743 else
3745 tree base_m = get_base_address (modify_dest);
3747 /* If the base isn't a decl, then it's a pointer, and we don't
3748 know where that's going to go. */
3749 if (!DECL_P (base_m))
3750 use_it = false;
3751 else if (is_global_var (base_m))
3752 use_it = false;
3753 else if (DECL_NOT_GIMPLE_REG_P (result)
3754 && !DECL_NOT_GIMPLE_REG_P (base_m))
3755 use_it = false;
3756 else if (!TREE_ADDRESSABLE (base_m))
3757 use_it = true;
3760 if (use_it)
3762 var = modify_dest;
3763 use = NULL;
3764 goto done;
3768 gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3770 var = copy_result_decl_to_var (result, id);
3771 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3773 /* Do not have the rest of GCC warn about this variable as it should
3774 not be visible to the user. */
3775 TREE_NO_WARNING (var) = 1;
3777 declare_inline_vars (id->block, var);
3779 /* Build the use expr. If the return type of the function was
3780 promoted, convert it back to the expected type. */
3781 use = var;
3782 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3784 /* If we can match up types by promotion/demotion do so. */
3785 if (fold_convertible_p (caller_type, var))
3786 use = fold_convert (caller_type, var);
3787 else
3789 /* ??? For valid programs we should not end up here.
3790 Still if we end up with truly mismatched types here, fall back
3791 to using a MEM_REF to not leak invalid GIMPLE to the following
3792 passes. */
3793 /* Prevent var from being written into SSA form. */
3794 if (is_gimple_reg_type (TREE_TYPE (var)))
3795 DECL_NOT_GIMPLE_REG_P (var) = true;
3796 use = fold_build2 (MEM_REF, caller_type,
3797 build_fold_addr_expr (var),
3798 build_int_cst (ptr_type_node, 0));
3802 STRIP_USELESS_TYPE_CONVERSION (use);
3804 if (DECL_BY_REFERENCE (result))
3806 TREE_ADDRESSABLE (var) = 1;
3807 var = build_fold_addr_expr (var);
3810 done:
3811 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3812 way, when the RESULT_DECL is encountered, it will be
3813 automatically replaced by the VAR_DECL.
3815 When returning by reference, ensure that RESULT_DECL remaps to
3816 gimple_val. */
3817 if (DECL_BY_REFERENCE (result)
3818 && !is_gimple_val (var))
3820 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3821 insert_decl_map (id, result, temp);
3822 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3823 its default_def SSA_NAME. */
3824 if (gimple_in_ssa_p (id->src_cfun)
3825 && is_gimple_reg (result))
3827 temp = make_ssa_name (temp);
3828 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3830 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3832 else
3833 insert_decl_map (id, result, var);
3835 /* Remember this so we can ignore it in remap_decls. */
3836 id->retvar = var;
3837 return use;
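/* For example (an illustrative sketch): for a call  d = make_s ()  where

     struct S make_s (void) { struct S s = { 0 }; return s; }

   declare_return_variable creates a VAR_DECL, say retval.2, maps make_s's
   RESULT_DECL to it and returns retval.2 as USE, so the call can later be
   replaced by  d = retval.2;.  When the front end applied the return slot
   optimization, 'd' itself is reused as the slot and no extra copy is
   needed; when the result is returned by invisible reference, the address
   of the slot is what the RESULT_DECL maps to.  */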
3840 /* Determine if the function can be copied. If so, return NULL. If
3841 not, return a string describing the reason for failure. */
3843 const char *
3844 copy_forbidden (struct function *fun)
3846 const char *reason = fun->cannot_be_copied_reason;
3848 /* Only examine the function once. */
3849 if (fun->cannot_be_copied_set)
3850 return reason;
3852 /* We cannot copy a function that receives a non-local goto
3853 because we cannot remap the destination label used in the
3854 function that is performing the non-local goto. */
3855 /* ??? Actually, this should be possible, if we work at it.
3856 No doubt there's just a handful of places that simply
3857 assume it doesn't happen and don't substitute properly. */
3858 if (fun->has_nonlocal_label)
3860 reason = G_("function %q+F can never be copied "
3861 "because it receives a non-local goto");
3862 goto fail;
3865 if (fun->has_forced_label_in_static)
3867 reason = G_("function %q+F can never be copied because it saves "
3868 "address of local label in a static variable");
3869 goto fail;
3872 fail:
3873 fun->cannot_be_copied_reason = reason;
3874 fun->cannot_be_copied_set = true;
3875 return reason;
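/* For example (an illustrative sketch using GNU C extensions), a function
   that stores the address of a local label in a static variable cannot be
   copied:

     void
     f (void)
     {
       static void *resume;
       if (resume)
         goto *resume;
       resume = &&done;
     done:;
     }

   Duplicating the body would leave 'resume' pointing into the original
   copy, so copy_forbidden records a reason and refuses.  */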
3879 static const char *inline_forbidden_reason;
3881 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3882 iff a function cannot be inlined. Also sets the reason why. */
3884 static tree
3885 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3886 struct walk_stmt_info *wip)
3888 tree fn = (tree) wip->info;
3889 tree t;
3890 gimple *stmt = gsi_stmt (*gsi);
3892 switch (gimple_code (stmt))
3894 case GIMPLE_CALL:
3895 /* Refuse to inline an alloca call unless the user explicitly forced it,
3896 as this may change the program's memory overhead drastically when the
3897 function using alloca is called in a loop. In the GCC source included
3898 in SPEC2000, inlining into schedule_block caused it to require 2GB of
3899 RAM instead of 256MB. Don't do so for alloca calls emitted for
3900 VLA objects, as those can't cause unbounded growth (they're always
3901 wrapped inside stack_save/stack_restore regions). */
3902 if (gimple_maybe_alloca_call_p (stmt)
3903 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3904 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3906 inline_forbidden_reason
3907 = G_("function %q+F can never be inlined because it uses "
3908 "alloca (override using the always_inline attribute)");
3909 *handled_ops_p = true;
3910 return fn;
3913 t = gimple_call_fndecl (stmt);
3914 if (t == NULL_TREE)
3915 break;
3917 /* We cannot inline functions that call setjmp. */
3918 if (setjmp_call_p (t))
3920 inline_forbidden_reason
3921 = G_("function %q+F can never be inlined because it uses setjmp");
3922 *handled_ops_p = true;
3923 return t;
3926 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3927 switch (DECL_FUNCTION_CODE (t))
3929 /* We cannot inline functions that take a variable number of
3930 arguments. */
3931 case BUILT_IN_VA_START:
3932 case BUILT_IN_NEXT_ARG:
3933 case BUILT_IN_VA_END:
3934 inline_forbidden_reason
3935 = G_("function %q+F can never be inlined because it "
3936 "uses variable argument lists");
3937 *handled_ops_p = true;
3938 return t;
3940 case BUILT_IN_LONGJMP:
3941 /* We can't inline functions that call __builtin_longjmp at
3942 all. The non-local goto machinery really requires the
3943 destination be in a different function. If we allow the
3944 function calling __builtin_longjmp to be inlined into the
3945 function calling __builtin_setjmp, Things will Go Awry. */
3946 inline_forbidden_reason
3947 = G_("function %q+F can never be inlined because "
3948 "it uses setjmp-longjmp exception handling");
3949 *handled_ops_p = true;
3950 return t;
3952 case BUILT_IN_NONLOCAL_GOTO:
3953 /* Similarly. */
3954 inline_forbidden_reason
3955 = G_("function %q+F can never be inlined because "
3956 "it uses non-local goto");
3957 *handled_ops_p = true;
3958 return t;
3960 case BUILT_IN_RETURN:
3961 case BUILT_IN_APPLY_ARGS:
3962 /* If a __builtin_apply_args caller would be inlined,
3963 it would be saving arguments of the function it has
3964 been inlined into. Similarly, __builtin_return would
3965 return from the function the call has been inlined into. */
3966 inline_forbidden_reason
3967 = G_("function %q+F can never be inlined because "
3968 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3969 *handled_ops_p = true;
3970 return t;
3972 default:
3973 break;
3975 break;
3977 case GIMPLE_GOTO:
3978 t = gimple_goto_dest (stmt);
3980 /* We will not inline a function which uses computed goto. The
3981 addresses of its local labels, which may be tucked into
3982 global storage, are of course not constant across
3983 instantiations, which causes unexpected behavior. */
3984 if (TREE_CODE (t) != LABEL_DECL)
3986 inline_forbidden_reason
3987 = G_("function %q+F can never be inlined "
3988 "because it contains a computed goto");
3989 *handled_ops_p = true;
3990 return t;
3992 break;
3994 default:
3995 break;
3998 *handled_ops_p = false;
3999 return NULL_TREE;
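/* Illustrative examples (hypothetical, not from the testsuite) of statements
   this callback rejects:

     extern void use (void *);
     void g (int n) { use (__builtin_alloca (n)); }       - plain alloca call
     void h (void **labels, int i) { goto *labels[i]; }   - computed goto

   For 'g' the restriction can be overridden with the always_inline
   attribute; alloca calls generated internally for VLAs are also exempt.  */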
4002 /* Return true if FNDECL is a function that cannot be inlined into
4003 another one. */
4005 static bool
4006 inline_forbidden_p (tree fndecl)
4008 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
4009 struct walk_stmt_info wi;
4010 basic_block bb;
4011 bool forbidden_p = false;
4013 /* First check for shared reasons not to copy the code. */
4014 inline_forbidden_reason = copy_forbidden (fun);
4015 if (inline_forbidden_reason != NULL)
4016 return true;
4018 /* Next, walk the statements of the function looking for
4019 constructs we can't handle or that are non-optimal for inlining. */
4020 hash_set<tree> visited_nodes;
4021 memset (&wi, 0, sizeof (wi));
4022 wi.info = (void *) fndecl;
4023 wi.pset = &visited_nodes;
4025 /* We cannot inline a function with a VLA typed argument or result since
4026 we have no implementation materializing a variable of such type in
4027 the caller. */
4028 if (COMPLETE_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4029 && !poly_int_tree_p (TYPE_SIZE (TREE_TYPE (TREE_TYPE (fndecl)))))
4031 inline_forbidden_reason
4032 = G_("function %q+F can never be inlined because "
4033 "it has a VLA return argument");
4034 return true;
4036 for (tree parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
4037 if (!poly_int_tree_p (DECL_SIZE (parm)))
4039 inline_forbidden_reason
4040 = G_("function %q+F can never be inlined because "
4041 "it has a VLA argument");
4042 return true;
4045 FOR_EACH_BB_FN (bb, fun)
4047 gimple *ret;
4048 gimple_seq seq = bb_seq (bb);
4049 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
4050 forbidden_p = (ret != NULL);
4051 if (forbidden_p)
4052 break;
4055 return forbidden_p;
4058 /* Return false if the function FNDECL cannot be inlined on account of its
4059 attributes, true otherwise. */
4060 static bool
4061 function_attribute_inlinable_p (const_tree fndecl)
4063 if (targetm.attribute_table)
4065 const_tree a;
4067 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
4069 const_tree name = get_attribute_name (a);
4070 int i;
4072 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
4073 if (is_attribute_p (targetm.attribute_table[i].name, name))
4074 return targetm.function_attribute_inlinable_p (fndecl);
4078 return true;
4081 /* Returns nonzero if FN is a function that does not have any
4082 fundamental inline blocking properties. */
4084 bool
4085 tree_inlinable_function_p (tree fn)
4087 bool inlinable = true;
4088 bool do_warning;
4089 tree always_inline;
4091 /* If we've already decided this function shouldn't be inlined,
4092 there's no need to check again. */
4093 if (DECL_UNINLINABLE (fn))
4094 return false;
4096 /* We only warn for functions declared `inline' by the user. */
4097 do_warning = (opt_for_fn (fn, warn_inline)
4098 && DECL_DECLARED_INLINE_P (fn)
4099 && !DECL_NO_INLINE_WARNING_P (fn)
4100 && !DECL_IN_SYSTEM_HEADER (fn));
4102 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4104 if (flag_no_inline
4105 && always_inline == NULL)
4107 if (do_warning)
4108 warning (OPT_Winline, "function %q+F can never be inlined because it "
4109 "is suppressed using %<-fno-inline%>", fn);
4110 inlinable = false;
4113 else if (!function_attribute_inlinable_p (fn))
4115 if (do_warning)
4116 warning (OPT_Winline, "function %q+F can never be inlined because it "
4117 "uses attributes conflicting with inlining", fn);
4118 inlinable = false;
4121 else if (inline_forbidden_p (fn))
4123 /* See if we should warn about uninlinable functions. Previously,
4124 some of these warnings would be issued while trying to expand
4125 the function inline, but that would cause multiple warnings
4126 about functions that would for example call alloca. But since
4127 this is a property of the function, just one warning is enough.
4128 As a bonus we can now give more details about the reason why a
4129 function is not inlinable. */
4130 if (always_inline)
4131 error (inline_forbidden_reason, fn);
4132 else if (do_warning)
4133 warning (OPT_Winline, inline_forbidden_reason, fn);
4135 inlinable = false;
4138 /* Squirrel away the result so that we don't have to check again. */
4139 DECL_UNINLINABLE (fn) = !inlinable;
4141 return inlinable;
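/* For example (an illustrative sketch):

     #include <setjmp.h>
     extern jmp_buf env;
     inline int checkpoint (void) { return setjmp (env); }

   never gets its body inlined; with -Winline it is diagnosed as
   "function 'checkpoint' can never be inlined because it uses setjmp",
   as a warning for a plain 'inline' function and as a hard error if the
   function is additionally marked always_inline.  */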
4144 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
4145 word size, take a possible memcpy call into account, and return the
4146 cost based on whether we are optimizing for size or speed according to SPEED_P. */
4148 int
4149 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4151 HOST_WIDE_INT size;
4153 gcc_assert (!VOID_TYPE_P (type));
4155 if (TREE_CODE (type) == VECTOR_TYPE)
4157 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4158 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4159 int orig_mode_size
4160 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4161 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4162 return ((orig_mode_size + simd_mode_size - 1)
4163 / simd_mode_size);
4166 size = int_size_in_bytes (type);
4168 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4169 /* Cost of a memcpy call, 3 arguments and the call. */
4170 return 4;
4171 else
4172 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
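/* A rough worked example (the target parameters below are assumed, e.g.
   MOVE_MAX_PIECES == 8 and MOVE_RATIO (speed_p) == 4):

     16-byte struct:  16 <= 8 * 4, cost = (16 + 8 - 1) / 8 = 2
     64-byte struct:  64 >  8 * 4, cost = 4 (a memcpy call: 3 args + call)

   Vector types are instead costed by the number of preferred-SIMD-mode
   chunks needed to hold them.  */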
4175 /* Returns cost of operation CODE, according to WEIGHTS */
4177 static int
4178 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4179 tree op1 ATTRIBUTE_UNUSED, tree op2)
4181 switch (code)
4183 /* These are "free" conversions, or their presumed cost
4184 is folded into other operations. */
4185 case RANGE_EXPR:
4186 CASE_CONVERT:
4187 case COMPLEX_EXPR:
4188 case PAREN_EXPR:
4189 case VIEW_CONVERT_EXPR:
4190 return 0;
4192 /* Assign cost of 1 to usual operations.
4193 ??? We may consider mapping RTL costs to this. */
4194 case COND_EXPR:
4195 case VEC_COND_EXPR:
4196 case VEC_PERM_EXPR:
4198 case PLUS_EXPR:
4199 case POINTER_PLUS_EXPR:
4200 case POINTER_DIFF_EXPR:
4201 case MINUS_EXPR:
4202 case MULT_EXPR:
4203 case MULT_HIGHPART_EXPR:
4205 case ADDR_SPACE_CONVERT_EXPR:
4206 case FIXED_CONVERT_EXPR:
4207 case FIX_TRUNC_EXPR:
4209 case NEGATE_EXPR:
4210 case FLOAT_EXPR:
4211 case MIN_EXPR:
4212 case MAX_EXPR:
4213 case ABS_EXPR:
4214 case ABSU_EXPR:
4216 case LSHIFT_EXPR:
4217 case RSHIFT_EXPR:
4218 case LROTATE_EXPR:
4219 case RROTATE_EXPR:
4221 case BIT_IOR_EXPR:
4222 case BIT_XOR_EXPR:
4223 case BIT_AND_EXPR:
4224 case BIT_NOT_EXPR:
4226 case TRUTH_ANDIF_EXPR:
4227 case TRUTH_ORIF_EXPR:
4228 case TRUTH_AND_EXPR:
4229 case TRUTH_OR_EXPR:
4230 case TRUTH_XOR_EXPR:
4231 case TRUTH_NOT_EXPR:
4233 case LT_EXPR:
4234 case LE_EXPR:
4235 case GT_EXPR:
4236 case GE_EXPR:
4237 case EQ_EXPR:
4238 case NE_EXPR:
4239 case ORDERED_EXPR:
4240 case UNORDERED_EXPR:
4242 case UNLT_EXPR:
4243 case UNLE_EXPR:
4244 case UNGT_EXPR:
4245 case UNGE_EXPR:
4246 case UNEQ_EXPR:
4247 case LTGT_EXPR:
4249 case CONJ_EXPR:
4251 case PREDECREMENT_EXPR:
4252 case PREINCREMENT_EXPR:
4253 case POSTDECREMENT_EXPR:
4254 case POSTINCREMENT_EXPR:
4256 case REALIGN_LOAD_EXPR:
4258 case WIDEN_PLUS_EXPR:
4259 case WIDEN_MINUS_EXPR:
4260 case WIDEN_SUM_EXPR:
4261 case WIDEN_MULT_EXPR:
4262 case DOT_PROD_EXPR:
4263 case SAD_EXPR:
4264 case WIDEN_MULT_PLUS_EXPR:
4265 case WIDEN_MULT_MINUS_EXPR:
4266 case WIDEN_LSHIFT_EXPR:
4268 case VEC_WIDEN_PLUS_HI_EXPR:
4269 case VEC_WIDEN_PLUS_LO_EXPR:
4270 case VEC_WIDEN_MINUS_HI_EXPR:
4271 case VEC_WIDEN_MINUS_LO_EXPR:
4272 case VEC_WIDEN_MULT_HI_EXPR:
4273 case VEC_WIDEN_MULT_LO_EXPR:
4274 case VEC_WIDEN_MULT_EVEN_EXPR:
4275 case VEC_WIDEN_MULT_ODD_EXPR:
4276 case VEC_UNPACK_HI_EXPR:
4277 case VEC_UNPACK_LO_EXPR:
4278 case VEC_UNPACK_FLOAT_HI_EXPR:
4279 case VEC_UNPACK_FLOAT_LO_EXPR:
4280 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4281 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4282 case VEC_PACK_TRUNC_EXPR:
4283 case VEC_PACK_SAT_EXPR:
4284 case VEC_PACK_FIX_TRUNC_EXPR:
4285 case VEC_PACK_FLOAT_EXPR:
4286 case VEC_WIDEN_LSHIFT_HI_EXPR:
4287 case VEC_WIDEN_LSHIFT_LO_EXPR:
4288 case VEC_DUPLICATE_EXPR:
4289 case VEC_SERIES_EXPR:
4291 return 1;
4293 /* A few special cases of expensive operations. This is useful
4294 to avoid inlining functions having too many of these. */
4295 case TRUNC_DIV_EXPR:
4296 case CEIL_DIV_EXPR:
4297 case FLOOR_DIV_EXPR:
4298 case ROUND_DIV_EXPR:
4299 case EXACT_DIV_EXPR:
4300 case TRUNC_MOD_EXPR:
4301 case CEIL_MOD_EXPR:
4302 case FLOOR_MOD_EXPR:
4303 case ROUND_MOD_EXPR:
4304 case RDIV_EXPR:
4305 if (TREE_CODE (op2) != INTEGER_CST)
4306 return weights->div_mod_cost;
4307 return 1;
4309 /* Bit-field insertion needs several shift and mask operations. */
4310 case BIT_INSERT_EXPR:
4311 return 3;
4313 default:
4314 /* We expect a copy assignment with no operator. */
4315 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4316 return 0;
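/* For example: 'a + b', 'a << 2' and a comparison each cost 1 and a plain
   conversion costs 0, whereas 'a / b' with a non-constant divisor costs
   WEIGHTS->div_mod_cost (e.g. 10 under the time-based weights set up in
   init_inline_once below); 'a / 16' still costs 1 because division by a
   constant is normally expanded to cheaper operations.  */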
4321 /* Estimate number of instructions that will be created by expanding
4322 the statements in the statement sequence STMTS.
4323 WEIGHTS contains weights attributed to various constructs. */
4325 int
4326 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4328 int cost;
4329 gimple_stmt_iterator gsi;
4331 cost = 0;
4332 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4333 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4335 return cost;
4339 /* Estimate number of instructions that will be created by expanding STMT.
4340 WEIGHTS contains weights attributed to various constructs. */
4342 int
4343 estimate_num_insns (gimple *stmt, eni_weights *weights)
4345 unsigned cost, i;
4346 enum gimple_code code = gimple_code (stmt);
4347 tree lhs;
4348 tree rhs;
4350 switch (code)
4352 case GIMPLE_ASSIGN:
4353 /* Try to estimate the cost of assignments. We have two cases to
4354 deal with:
4355 1) Simple assignments to registers;
4356 2) Stores to things that must live in memory. This includes
4357 "normal" stores to scalars, but also assignments of large
4358 structures, or constructors of big arrays;
4360 Let us look at these two cases, assuming we have "a = b + C":
4361 <GIMPLE_ASSIGN <var_decl "a">
4362 <plus_expr <var_decl "b"> <constant C>>
4363 If "a" is a GIMPLE register, the assignment to it is free on almost
4364 any target, because "a" usually ends up in a real register. Hence
4365 the only cost of this expression comes from the PLUS_EXPR, and we
4366 can ignore the GIMPLE_ASSIGN.
4367 If "a" is not a GIMPLE register, the assignment to "a" will most
4368 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4369 of moving something into "a", which we compute using the function
4370 estimate_move_cost. */
4371 if (gimple_clobber_p (stmt))
4372 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4374 lhs = gimple_assign_lhs (stmt);
4375 rhs = gimple_assign_rhs1 (stmt);
4377 cost = 0;
4379 /* Account for the cost of moving to / from memory. */
4380 if (gimple_store_p (stmt))
4381 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4382 if (gimple_assign_load_p (stmt))
4383 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4385 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4386 gimple_assign_rhs1 (stmt),
4387 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4388 == GIMPLE_BINARY_RHS
4389 ? gimple_assign_rhs2 (stmt) : NULL);
4390 break;
4392 case GIMPLE_COND:
4393 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4394 gimple_op (stmt, 0),
4395 gimple_op (stmt, 1));
4396 break;
4398 case GIMPLE_SWITCH:
4400 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4401 /* Take into account cost of the switch + guess 2 conditional jumps for
4402 each case label.
4404 TODO: once the switch expansion logic is sufficiently separated, we can
4405 do a better job of estimating the cost of the switch. */
4406 if (weights->time_based)
4407 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4408 else
4409 cost = gimple_switch_num_labels (switch_stmt) * 2;
4411 break;
4413 case GIMPLE_CALL:
4415 tree decl;
4417 if (gimple_call_internal_p (stmt))
4418 return 0;
4419 else if ((decl = gimple_call_fndecl (stmt))
4420 && fndecl_built_in_p (decl))
4422 /* Do not special case builtins where we see the body.
4423 This just confuses the inliner. */
4424 struct cgraph_node *node;
4425 if (!(node = cgraph_node::get (decl))
4426 || node->definition)
4428 /* For builtins that are likely expanded to nothing or
4429 inlined, do not account for operand costs. */
4430 else if (is_simple_builtin (decl))
4431 return 0;
4432 else if (is_inexpensive_builtin (decl))
4433 return weights->target_builtin_call_cost;
4434 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4436 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4437 specialize the cheap expansion we do here.
4438 ??? This asks for a more general solution. */
4439 switch (DECL_FUNCTION_CODE (decl))
4441 case BUILT_IN_POW:
4442 case BUILT_IN_POWF:
4443 case BUILT_IN_POWL:
4444 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4445 && (real_equal
4446 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4447 &dconst2)))
4448 return estimate_operator_cost
4449 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4450 gimple_call_arg (stmt, 0));
4451 break;
4453 default:
4454 break;
4459 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4460 if (gimple_call_lhs (stmt))
4461 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4462 weights->time_based);
4463 for (i = 0; i < gimple_call_num_args (stmt); i++)
4465 tree arg = gimple_call_arg (stmt, i);
4466 cost += estimate_move_cost (TREE_TYPE (arg),
4467 weights->time_based);
4469 break;
4472 case GIMPLE_RETURN:
4473 return weights->return_cost;
4475 case GIMPLE_GOTO:
4476 case GIMPLE_LABEL:
4477 case GIMPLE_NOP:
4478 case GIMPLE_PHI:
4479 case GIMPLE_PREDICT:
4480 case GIMPLE_DEBUG:
4481 return 0;
4483 case GIMPLE_ASM:
4485 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4486 /* 1000 means infinity. This avoids overflows later
4487 with very long asm statements. */
4488 if (count > 1000)
4489 count = 1000;
4490 /* If this asm is asm inline, count anything as minimum size. */
4491 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4492 count = MIN (1, count);
4493 return MAX (1, count);
4496 case GIMPLE_RESX:
4497 /* This is either going to be an external function call with one
4498 argument, or two register copy statements plus a goto. */
4499 return 2;
4501 case GIMPLE_EH_DISPATCH:
4502 /* ??? This is going to turn into a switch statement. Ideally
4503 we'd have a look at the eh region and estimate the number of
4504 edges involved. */
4505 return 10;
4507 case GIMPLE_BIND:
4508 return estimate_num_insns_seq (
4509 gimple_bind_body (as_a <gbind *> (stmt)),
4510 weights);
4512 case GIMPLE_EH_FILTER:
4513 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4515 case GIMPLE_CATCH:
4516 return estimate_num_insns_seq (gimple_catch_handler (
4517 as_a <gcatch *> (stmt)),
4518 weights);
4520 case GIMPLE_TRY:
4521 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4522 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4524 /* OMP directives are generally very expensive. */
4526 case GIMPLE_OMP_RETURN:
4527 case GIMPLE_OMP_SECTIONS_SWITCH:
4528 case GIMPLE_OMP_ATOMIC_STORE:
4529 case GIMPLE_OMP_CONTINUE:
4530 /* ...except these, which are cheap. */
4531 return 0;
4533 case GIMPLE_OMP_ATOMIC_LOAD:
4534 return weights->omp_cost;
4536 case GIMPLE_OMP_FOR:
4537 return (weights->omp_cost
4538 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4539 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4541 case GIMPLE_OMP_PARALLEL:
4542 case GIMPLE_OMP_TASK:
4543 case GIMPLE_OMP_CRITICAL:
4544 case GIMPLE_OMP_MASTER:
4545 case GIMPLE_OMP_TASKGROUP:
4546 case GIMPLE_OMP_ORDERED:
4547 case GIMPLE_OMP_SCAN:
4548 case GIMPLE_OMP_SECTION:
4549 case GIMPLE_OMP_SECTIONS:
4550 case GIMPLE_OMP_SINGLE:
4551 case GIMPLE_OMP_TARGET:
4552 case GIMPLE_OMP_TEAMS:
4553 return (weights->omp_cost
4554 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4556 case GIMPLE_TRANSACTION:
4557 return (weights->tm_cost
4558 + estimate_num_insns_seq (gimple_transaction_body (
4559 as_a <gtransaction *> (stmt)),
4560 weights));
4562 default:
4563 gcc_unreachable ();
4566 return cost;
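/* A worked example under eni_size_weights (illustrative; move costs are
   target-dependent):

     int f (int *p, int a, int b)
     {
       int t = a + b;   register assign, PLUS_EXPR        -> 1
       *p = t;          store of an int, move cost        -> 1
       return a;        GIMPLE_RETURN, return_cost        -> 1
     }

   so estimate_num_insns_fn would report roughly 3 for the size estimate;
   the time-based weights mostly differ for calls, divisions and returns.  */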
4569 /* Estimate number of instructions that will be created by expanding
4570 function FNDECL. WEIGHTS contains weights attributed to various
4571 constructs. */
4573 int
4574 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4576 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4577 gimple_stmt_iterator bsi;
4578 basic_block bb;
4579 int n = 0;
4581 gcc_assert (my_function && my_function->cfg);
4582 FOR_EACH_BB_FN (bb, my_function)
4584 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4585 n += estimate_num_insns (gsi_stmt (bsi), weights);
4588 return n;
4592 /* Initializes weights used by estimate_num_insns. */
4594 void
4595 init_inline_once (void)
4597 eni_size_weights.call_cost = 1;
4598 eni_size_weights.indirect_call_cost = 3;
4599 eni_size_weights.target_builtin_call_cost = 1;
4600 eni_size_weights.div_mod_cost = 1;
4601 eni_size_weights.omp_cost = 40;
4602 eni_size_weights.tm_cost = 10;
4603 eni_size_weights.time_based = false;
4604 eni_size_weights.return_cost = 1;
4606 /* Estimating time for call is difficult, since we have no idea what the
4607 called function does. In the current uses of eni_time_weights,
4608 underestimating the cost does less harm than overestimating it, so
4609 we choose a rather small value here. */
4610 eni_time_weights.call_cost = 10;
4611 eni_time_weights.indirect_call_cost = 15;
4612 eni_time_weights.target_builtin_call_cost = 1;
4613 eni_time_weights.div_mod_cost = 10;
4614 eni_time_weights.omp_cost = 40;
4615 eni_time_weights.tm_cost = 40;
4616 eni_time_weights.time_based = true;
4617 eni_time_weights.return_cost = 2;
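/* For instance, a direct call  x = g (a, b)  with word-sized scalar
   arguments and result is charged call_cost plus one move per argument and
   one for the returned value: roughly 1 + 3 = 4 under eni_size_weights but
   10 + 3 = 13 under eni_time_weights, reflecting that calls matter far
   more for time than for size estimates.  */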
4621 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4623 static void
4624 prepend_lexical_block (tree current_block, tree new_block)
4626 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4627 BLOCK_SUBBLOCKS (current_block) = new_block;
4628 BLOCK_SUPERCONTEXT (new_block) = current_block;
4631 /* Add local variables from CALLEE to CALLER. */
4633 static inline void
4634 add_local_variables (struct function *callee, struct function *caller,
4635 copy_body_data *id)
4637 tree var;
4638 unsigned ix;
4640 FOR_EACH_LOCAL_DECL (callee, ix, var)
4641 if (!can_be_nonlocal (var, id))
4643 tree new_var = remap_decl (var, id);
4645 /* Remap debug-expressions. */
4646 if (VAR_P (new_var)
4647 && DECL_HAS_DEBUG_EXPR_P (var)
4648 && new_var != var)
4650 tree tem = DECL_DEBUG_EXPR (var);
4651 bool old_regimplify = id->regimplify;
4652 id->remapping_type_depth++;
4653 walk_tree (&tem, copy_tree_body_r, id, NULL);
4654 id->remapping_type_depth--;
4655 id->regimplify = old_regimplify;
4656 SET_DECL_DEBUG_EXPR (new_var, tem);
4657 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4659 add_local_decl (caller, new_var);
4663 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4664 have brought in or introduced any debug stmts for SRCVAR. */
4666 static inline void
4667 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4669 tree *remappedvarp = id->decl_map->get (srcvar);
4671 if (!remappedvarp)
4672 return;
4674 if (!VAR_P (*remappedvarp))
4675 return;
4677 if (*remappedvarp == id->retvar)
4678 return;
4680 tree tvar = target_for_debug_bind (*remappedvarp);
4681 if (!tvar)
4682 return;
4684 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4685 id->call_stmt);
4686 gimple_seq_add_stmt (bindings, stmt);
4689 /* For each inlined variable for which we may have debug bind stmts,
4690 add before GSI a final debug stmt resetting it, marking the end of
4691 its life, so that var-tracking knows it doesn't have to compute
4692 further locations for it. */
4694 static inline void
4695 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4697 tree var;
4698 unsigned ix;
4699 gimple_seq bindings = NULL;
4701 if (!gimple_in_ssa_p (id->src_cfun))
4702 return;
4704 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4705 return;
4707 for (var = DECL_ARGUMENTS (id->src_fn);
4708 var; var = DECL_CHAIN (var))
4709 reset_debug_binding (id, var, &bindings);
4711 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4712 reset_debug_binding (id, var, &bindings);
4714 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4717 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4719 static bool
4720 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4721 bitmap to_purge)
4723 tree use_retvar;
4724 tree fn;
4725 hash_map<tree, tree> *dst;
4726 hash_map<tree, tree> *st = NULL;
4727 tree return_slot;
4728 tree modify_dest;
4729 struct cgraph_edge *cg_edge;
4730 cgraph_inline_failed_t reason;
4731 basic_block return_block;
4732 edge e;
4733 gimple_stmt_iterator gsi, stmt_gsi;
4734 bool successfully_inlined = false;
4735 bool purge_dead_abnormal_edges;
4736 gcall *call_stmt;
4737 unsigned int prop_mask, src_properties;
4738 struct function *dst_cfun;
4739 tree simduid;
4740 use_operand_p use;
4741 gimple *simtenter_stmt = NULL;
4742 vec<tree> *simtvars_save;
4743 clone_info *info;
4745 /* The gimplifier uses input_location in too many places, such as
4746 internal_get_tmp_var (). */
4747 location_t saved_location = input_location;
4748 input_location = gimple_location (stmt);
4750 /* From here on, we're only interested in CALL_EXPRs. */
4751 call_stmt = dyn_cast <gcall *> (stmt);
4752 if (!call_stmt)
4753 goto egress;
4755 cg_edge = id->dst_node->get_edge (stmt);
4756 gcc_checking_assert (cg_edge);
4757 /* First, see if we can figure out what function is being called.
4758 If we cannot, then there is no hope of inlining the function. */
4759 if (cg_edge->indirect_unknown_callee)
4760 goto egress;
4761 fn = cg_edge->callee->decl;
4762 gcc_checking_assert (fn);
4764 /* If FN is a declaration of a function in a nested scope that was
4765 globally declared inline, we don't set its DECL_INITIAL.
4766 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4767 C++ front-end uses it for cdtors to refer to their internal
4768 declarations that are not real functions. Fortunately those
4769 don't have trees to be saved, so we can tell by checking their
4770 gimple_body. */
4771 if (!DECL_INITIAL (fn)
4772 && DECL_ABSTRACT_ORIGIN (fn)
4773 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4774 fn = DECL_ABSTRACT_ORIGIN (fn);
4776 /* Don't try to inline functions that are not well-suited to inlining. */
4777 if (cg_edge->inline_failed)
4779 reason = cg_edge->inline_failed;
4780 /* If this call was originally indirect, we do not want to emit any
4781 inlining related warnings or sorry messages because there are no
4782 guarantees regarding those. */
4783 if (cg_edge->indirect_inlining_edge)
4784 goto egress;
4786 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4787 /* For extern inline functions that get redefined we always
4788 silently ignored the always_inline flag. Better behavior would
4789 be to be able to keep both bodies and use extern inline body
4790 for inlining, but we can't do that because frontends overwrite
4791 the body. */
4792 && !cg_edge->callee->redefined_extern_inline
4793 /* During early inline pass, report only when optimization is
4794 not turned on. */
4795 && (symtab->global_info_ready
4796 || !optimize
4797 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4798 /* PR 20090218-1_0.c. Body can be provided by another module. */
4799 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4801 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4802 cgraph_inline_failed_string (reason));
4803 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4804 inform (gimple_location (stmt), "called from here");
4805 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4806 inform (DECL_SOURCE_LOCATION (cfun->decl),
4807 "called from this function");
4809 else if (opt_for_fn (fn, warn_inline)
4810 && DECL_DECLARED_INLINE_P (fn)
4811 && !DECL_NO_INLINE_WARNING_P (fn)
4812 && !DECL_IN_SYSTEM_HEADER (fn)
4813 && reason != CIF_UNSPECIFIED
4814 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4815 /* Do not warn about not inlined recursive calls. */
4816 && !cg_edge->recursive_p ()
4817 /* Avoid warnings during early inline pass. */
4818 && symtab->global_info_ready)
4820 auto_diagnostic_group d;
4821 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4822 fn, _(cgraph_inline_failed_string (reason))))
4824 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4825 inform (gimple_location (stmt), "called from here");
4826 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4827 inform (DECL_SOURCE_LOCATION (cfun->decl),
4828 "called from this function");
4831 goto egress;
4833 id->src_node = cg_edge->callee;
4835 /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4836 and redirect to the function being thunked. */
4837 if (id->src_node->thunk)
4839 cgraph_edge *edge;
4840 tree virtual_offset = NULL;
4841 profile_count count = cg_edge->count;
4842 tree op;
4843 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4844 thunk_info *info = thunk_info::get (id->src_node);
4846 cgraph_edge::remove (cg_edge);
4847 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4848 gimple_uid (stmt),
4849 profile_count::one (),
4850 profile_count::one (),
4851 true);
4852 edge->count = count;
4853 if (info->virtual_offset_p)
4854 virtual_offset = size_int (info->virtual_value);
4855 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4856 NULL);
4857 gsi_insert_before (&iter, gimple_build_assign (op,
4858 gimple_call_arg (stmt, 0)),
4859 GSI_NEW_STMT);
4860 gcc_assert (info->this_adjusting);
4861 op = thunk_adjust (&iter, op, 1, info->fixed_offset,
4862 virtual_offset, info->indirect_offset);
4864 gimple_call_set_arg (stmt, 0, op);
4865 gimple_call_set_fndecl (stmt, edge->callee->decl);
4866 update_stmt (stmt);
4867 id->src_node->remove ();
4868 successfully_inlined = expand_call_inline (bb, stmt, id, to_purge);
4869 maybe_remove_unused_call_args (cfun, stmt);
4870 /* This used to return true even though we do fail to inline in
4871 some cases. See PR98525. */
4872 goto egress;
4874 fn = cg_edge->callee->decl;
4875 cg_edge->callee->get_untransformed_body ();
4877 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4878 cg_edge->callee->verify ();
4880 /* We will be inlining this callee. */
4881 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4883 /* Update the callers EH personality. */
4884 if (DECL_FUNCTION_PERSONALITY (fn))
4885 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4886 = DECL_FUNCTION_PERSONALITY (fn);
4888 /* Split the block before the GIMPLE_CALL. */
4889 stmt_gsi = gsi_for_stmt (stmt);
4890 gsi_prev (&stmt_gsi);
4891 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4892 bb = e->src;
4893 return_block = e->dest;
4894 remove_edge (e);
4896 /* If the GIMPLE_CALL was the last statement of BB, it may have
4897 been the source of abnormal edges. In this case, schedule
4898 the removal of dead abnormal edges. */
4899 gsi = gsi_start_bb (return_block);
4900 gsi_next (&gsi);
4901 purge_dead_abnormal_edges = gsi_end_p (gsi);
4903 stmt_gsi = gsi_start_bb (return_block);
4905 /* Build a block containing code to initialize the arguments, the
4906 actual inline expansion of the body, and a label for the return
4907 statements within the function to jump to. The type of the
4908 statement expression is the return type of the function call.
4909 ??? If the call does not have an associated block then we will
4910 remap all callee blocks to NULL, effectively dropping most of
4911 its debug information. This should only happen for calls to
4912 artificial decls inserted by the compiler itself. We need to
4913 either link the inlined blocks into the caller block tree or
4914 not refer to them in any way to not break GC for locations. */
4915 if (tree block = gimple_block (stmt))
4917 /* We do want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION
4918 so that inlined_function_outer_scope_p returns true on this BLOCK. */
4919 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4920 if (loc == UNKNOWN_LOCATION)
4921 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4922 if (loc == UNKNOWN_LOCATION)
4923 loc = BUILTINS_LOCATION;
4924 id->block = make_node (BLOCK);
4925 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4926 BLOCK_SOURCE_LOCATION (id->block) = loc;
4927 prepend_lexical_block (block, id->block);
4930 /* Local declarations will be replaced by their equivalents in this map. */
4931 st = id->decl_map;
4932 id->decl_map = new hash_map<tree, tree>;
4933 dst = id->debug_map;
4934 id->debug_map = NULL;
4935 if (flag_stack_reuse != SR_NONE)
4936 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4938 /* Record the function we are about to inline. */
4939 id->src_fn = fn;
4940 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4941 id->reset_location = DECL_IGNORED_P (fn);
4942 id->call_stmt = call_stmt;
4944 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4945 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4946 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4947 simtvars_save = id->dst_simt_vars;
4948 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4949 && (simduid = bb->loop_father->simduid) != NULL_TREE
4950 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4951 && single_imm_use (simduid, &use, &simtenter_stmt)
4952 && is_gimple_call (simtenter_stmt)
4953 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4954 vec_alloc (id->dst_simt_vars, 0);
4955 else
4956 id->dst_simt_vars = NULL;
4958 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4959 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4961 /* If the src function contains an IFN_VA_ARG, then so will the dst
4962 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4963 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4964 src_properties = id->src_cfun->curr_properties & prop_mask;
4965 if (src_properties != prop_mask)
4966 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4967 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4968 id->dst_node->calls_declare_variant_alt
4969 |= id->src_node->calls_declare_variant_alt;
4971 gcc_assert (!id->src_cfun->after_inlining);
4973 id->entry_bb = bb;
4974 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4976 gimple_stmt_iterator si = gsi_last_bb (bb);
4977 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4978 NOT_TAKEN),
4979 GSI_NEW_STMT);
4981 initialize_inlined_parameters (id, stmt, fn, bb);
4982 if (debug_nonbind_markers_p && debug_inline_points && id->block
4983 && inlined_function_outer_scope_p (id->block))
4985 gimple_stmt_iterator si = gsi_last_bb (bb);
4986 gsi_insert_after (&si, gimple_build_debug_inline_entry
4987 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4988 GSI_NEW_STMT);
4991 if (DECL_INITIAL (fn))
4993 if (gimple_block (stmt))
4995 tree *var;
4997 prepend_lexical_block (id->block,
4998 remap_blocks (DECL_INITIAL (fn), id));
4999 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
5000 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
5001 == NULL_TREE));
5002 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
5003 otherwise, for DWARF, the DW_TAG_formal_parameter entries will not be
5004 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
5005 under it. The parameters can then be evaluated in the debugger,
5006 but don't show up in backtraces. */
5007 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
5008 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
5010 tree v = *var;
5011 *var = TREE_CHAIN (v);
5012 TREE_CHAIN (v) = BLOCK_VARS (id->block);
5013 BLOCK_VARS (id->block) = v;
5015 else
5016 var = &TREE_CHAIN (*var);
5018 else
5019 remap_blocks_to_null (DECL_INITIAL (fn), id);
5022 /* Return statements in the function body will be replaced by jumps
5023 to the RET_LABEL. */
5024 gcc_assert (DECL_INITIAL (fn));
5025 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
5027 /* Find the LHS to which the result of this call is assigned. */
5028 return_slot = NULL;
5029 if (gimple_call_lhs (stmt))
5031 modify_dest = gimple_call_lhs (stmt);
5033 /* The function which we are inlining might not return a value,
5034 in which case we should issue a warning that the function
5035 does not return a value. In that case the optimizers will
5036 see that the variable to which the value is assigned was not
5037 initialized. We do not want to issue a warning about that
5038 uninitialized variable. */
5039 if (DECL_P (modify_dest))
5040 TREE_NO_WARNING (modify_dest) = 1;
5042 if (gimple_call_return_slot_opt_p (call_stmt))
5044 return_slot = modify_dest;
5045 modify_dest = NULL;
5048 else
5049 modify_dest = NULL;
5051 /* If we are inlining a call to the C++ operator new, we don't want
5052 to use type based alias analysis on the return value. Otherwise
5053 we may get confused if the compiler sees that the inlined new
5054 function returns a pointer which was just deleted. See bug
5055 33407. */
5056 if (DECL_IS_OPERATOR_NEW_P (fn))
5058 return_slot = NULL;
5059 modify_dest = NULL;
5062 /* Declare the return variable for the function. */
5063 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
5065 /* Add local vars in this inlined callee to caller. */
5066 add_local_variables (id->src_cfun, cfun, id);
5068 info = clone_info::get (id->src_node);
5069 if (info && info->performed_splits)
5071 clone_info *dst_info = clone_info::get_create (id->dst_node);
5072 /* Any calls from the inlined function will be turned into calls from the
5073 function we inline into. We must preserve the notes about how to split
5074 parameters so that such calls can be redirected/updated. */
5075 unsigned len = vec_safe_length (info->performed_splits);
5076 for (unsigned i = 0; i < len; i++)
5078 ipa_param_performed_split ps
5079 = (*info->performed_splits)[i];
5080 ps.dummy_decl = remap_decl (ps.dummy_decl, id);
5081 vec_safe_push (dst_info->performed_splits, ps);
5084 if (flag_checking)
5086 len = vec_safe_length (dst_info->performed_splits);
5087 for (unsigned i = 0; i < len; i++)
5089 ipa_param_performed_split *ps1
5090 = &(*dst_info->performed_splits)[i];
5091 for (unsigned j = i + 1; j < len; j++)
5093 ipa_param_performed_split *ps2
5094 = &(*dst_info->performed_splits)[j];
5095 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
5096 || ps1->unit_offset != ps2->unit_offset);
5102 if (dump_enabled_p ())
5104 char buf[128];
5105 snprintf (buf, sizeof(buf), "%4.2f",
5106 cg_edge->sreal_frequency ().to_double ());
5107 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5108 call_stmt,
5109 "Inlining %C to %C with frequency %s\n",
5110 id->src_node, id->dst_node, buf);
5111 if (dump_file && (dump_flags & TDF_DETAILS))
5113 id->src_node->dump (dump_file);
5114 id->dst_node->dump (dump_file);
5118 /* This is it. Duplicate the callee body. Assume callee is
5119 pre-gimplified. Note that we must not alter the caller
5120 function in any way before this point, as this CALL_EXPR may be
5121 a self-referential call; if we're calling ourselves, we need to
5122 duplicate our body before altering anything. */
5123 copy_body (id, bb, return_block, NULL);
5125 reset_debug_bindings (id, stmt_gsi);
5127 if (flag_stack_reuse != SR_NONE)
5128 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5129 if (!TREE_THIS_VOLATILE (p))
5131 tree *varp = id->decl_map->get (p);
5132 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
5134 tree clobber = build_clobber (TREE_TYPE (*varp));
5135 gimple *clobber_stmt;
5136 clobber_stmt = gimple_build_assign (*varp, clobber);
5137 gimple_set_location (clobber_stmt, gimple_location (stmt));
5138 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5142 /* Reset the escaped solution. */
5143 if (cfun->gimple_df)
5144 pt_solution_reset (&cfun->gimple_df->escaped);
5146 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
5147 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5149 size_t nargs = gimple_call_num_args (simtenter_stmt);
5150 vec<tree> *vars = id->dst_simt_vars;
5151 auto_vec<tree> newargs (nargs + vars->length ());
5152 for (size_t i = 0; i < nargs; i++)
5153 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5154 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5156 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5157 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5159 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5160 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5161 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5162 gsi_replace (&gsi, g, false);
5164 vec_free (id->dst_simt_vars);
5165 id->dst_simt_vars = simtvars_save;
5167 /* Clean up. */
5168 if (id->debug_map)
5170 delete id->debug_map;
5171 id->debug_map = dst;
5173 delete id->decl_map;
5174 id->decl_map = st;
5176 /* Unlink the call's virtual operands before replacing it. */
5177 unlink_stmt_vdef (stmt);
5178 if (gimple_vdef (stmt)
5179 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5180 release_ssa_name (gimple_vdef (stmt));
5182 /* If the inlined function returns a result that we care about,
5183 substitute the GIMPLE_CALL with an assignment of the return
5184 variable to the LHS of the call. That is, if STMT was
5185 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
5186 if (use_retvar && gimple_call_lhs (stmt))
5188 gimple *old_stmt = stmt;
5189 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5190 gimple_set_location (stmt, gimple_location (old_stmt));
5191 gsi_replace (&stmt_gsi, stmt, false);
5192 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5193 /* Append a clobber for id->retvar if easily possible. */
5194 if (flag_stack_reuse != SR_NONE
5195 && id->retvar
5196 && VAR_P (id->retvar)
5197 && id->retvar != return_slot
5198 && id->retvar != modify_dest
5199 && !TREE_THIS_VOLATILE (id->retvar)
5200 && !is_gimple_reg (id->retvar)
5201 && !stmt_ends_bb_p (stmt))
5203 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5204 gimple *clobber_stmt;
5205 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5206 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5207 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5210 else
5212 /* Handle the case of inlining a function with no return
5213 statement, which causes the return value to become undefined. */
5214 if (gimple_call_lhs (stmt)
5215 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5217 tree name = gimple_call_lhs (stmt);
5218 tree var = SSA_NAME_VAR (name);
5219 tree def = var ? ssa_default_def (cfun, var) : NULL;
5221 if (def)
5223 /* If the variable is used undefined, make this name
5224 undefined via a move. */
5225 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5226 gsi_replace (&stmt_gsi, stmt, true);
5228 else
5230 if (!var)
5232 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5233 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5235 /* Otherwise make this variable undefined. */
5236 gsi_remove (&stmt_gsi, true);
5237 set_ssa_default_def (cfun, var, name);
5238 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5241 /* Replace with a clobber for id->retvar. */
5242 else if (flag_stack_reuse != SR_NONE
5243 && id->retvar
5244 && VAR_P (id->retvar)
5245 && id->retvar != return_slot
5246 && id->retvar != modify_dest
5247 && !TREE_THIS_VOLATILE (id->retvar)
5248 && !is_gimple_reg (id->retvar))
5250 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5251 gimple *clobber_stmt;
5252 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5253 gimple_set_location (clobber_stmt, gimple_location (stmt));
5254 gsi_replace (&stmt_gsi, clobber_stmt, false);
5255 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5257 else
5258 gsi_remove (&stmt_gsi, true);
5261 if (purge_dead_abnormal_edges)
5262 bitmap_set_bit (to_purge, return_block->index);
5264 /* If the value of the new expression is ignored, that's OK. We
5265 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5266 the equivalent inlined version either. */
5267 if (is_gimple_assign (stmt))
5269 gcc_assert (gimple_assign_single_p (stmt)
5270 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5271 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5274 id->add_clobbers_to_eh_landing_pads = 0;
5276 /* Output the inlining info for this abstract function, since it has been
5277 inlined. If we don't do this now, we can lose the information about the
5278 variables in the function when the blocks get blown away as soon as we
5279 remove the cgraph node. */
5280 if (gimple_block (stmt))
5281 (*debug_hooks->outlining_inline_function) (fn);
5283 /* Update callgraph if needed. */
5284 cg_edge->callee->remove ();
5286 id->block = NULL_TREE;
5287 id->retvar = NULL_TREE;
5288 successfully_inlined = true;
5290 egress:
5291 input_location = saved_location;
5292 return successfully_inlined;
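/* Conceptually (a simplified source-level sketch; the real transformation
   works on the CFG), expand_call_inline turns

     <bb>:   before;  y = foo (x);  after;

   into

     <bb>:    before;  a.3 = x;                    parameter setup
     <body>:  copy of foo's body, with each return
              rewritten to  retval.4 = ...; goto <ret>;
     <ret>:   y = retval.4;  after;

   by splitting the block before the call, copying the callee body between
   the two halves and finally replacing the GIMPLE_CALL with an assignment
   from the return variable (or removing it when the value is unused).  */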
5295 /* Expand call statements reachable from STMT_P.
5296 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5297 in a MODIFY_EXPR. */
5299 static bool
5300 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5301 bitmap to_purge)
5303 gimple_stmt_iterator gsi;
5304 bool inlined = false;
5306 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5308 gimple *stmt = gsi_stmt (gsi);
5309 gsi_prev (&gsi);
5311 if (is_gimple_call (stmt)
5312 && !gimple_call_internal_p (stmt))
5313 inlined |= expand_call_inline (bb, stmt, id, to_purge);
5316 return inlined;
5320 /* Walk all basic blocks created after FIRST and try to fold every statement
5321 in the STATEMENTS pointer set. */
5323 static void
5324 fold_marked_statements (int first, hash_set<gimple *> *statements)
5326 auto_bitmap to_purge;
5328 auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
5329 auto_sbitmap visited (last_basic_block_for_fn (cfun));
5330 bitmap_clear (visited);
5332 stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5333 while (!stack.is_empty ())
5335 /* Look at the edge on the top of the stack. */
5336 edge e = stack.pop ();
5337 basic_block dest = e->dest;
5339 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
5340 || bitmap_bit_p (visited, dest->index))
5341 continue;
5343 bitmap_set_bit (visited, dest->index);
5345 if (dest->index >= first)
5346 for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
5347 !gsi_end_p (gsi); gsi_next (&gsi))
5349 if (!statements->contains (gsi_stmt (gsi)))
5350 continue;
5352 gimple *old_stmt = gsi_stmt (gsi);
5353 tree old_decl = (is_gimple_call (old_stmt)
5354 ? gimple_call_fndecl (old_stmt) : 0);
5355 if (old_decl && fndecl_built_in_p (old_decl))
5357 /* Folding builtins can create multiple instructions,
5358 we need to look at all of them. */
5359 gimple_stmt_iterator i2 = gsi;
5360 gsi_prev (&i2);
5361 if (fold_stmt (&gsi))
5363 gimple *new_stmt;
5364 /* If a builtin at the end of a bb folded into nothing,
5365 the following loop won't work. */
5366 if (gsi_end_p (gsi))
5368 cgraph_update_edges_for_call_stmt (old_stmt,
5369 old_decl, NULL);
5370 break;
5372 if (gsi_end_p (i2))
5373 i2 = gsi_start_bb (dest);
5374 else
5375 gsi_next (&i2);
5376 while (1)
5378 new_stmt = gsi_stmt (i2);
5379 update_stmt (new_stmt);
5380 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5381 new_stmt);
5383 if (new_stmt == gsi_stmt (gsi))
5385 /* It is okay to check only for the very last
5386 of these statements. If it is a throwing
5387 statement nothing will change. If it isn't,
5388 this can remove EH edges. The only way this
5389 would not be correct is if some intermediate
5390 stmts could throw but the last one doesn't;
5391 that would mean we'd have to split the block,
5392 which we can't do here and would lose anyway.
5393 And as builtins probably never throw, this all
5394 is moot anyway. */
5395 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5396 new_stmt))
5397 bitmap_set_bit (to_purge, dest->index);
5398 break;
5400 gsi_next (&i2);
5404 else if (fold_stmt (&gsi))
5406 /* Re-read the statement from GSI as fold_stmt() may
5407 have changed it. */
5408 gimple *new_stmt = gsi_stmt (gsi);
5409 update_stmt (new_stmt);
5411 if (is_gimple_call (old_stmt)
5412 || is_gimple_call (new_stmt))
5413 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5414 new_stmt);
5416 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5417 bitmap_set_bit (to_purge, dest->index);
5421 if (EDGE_COUNT (dest->succs) > 0)
5423 /* Avoid warnings emitted from folding statements that
5424 became unreachable because of inlined function parameter
5425 propagation. */
5426 e = find_taken_edge (dest, NULL_TREE);
5427 if (e)
5428 stack.quick_push (e);
5429 else
5431 edge_iterator ei;
5432 FOR_EACH_EDGE (e, ei, dest->succs)
5433 stack.safe_push (e);
5438 gimple_purge_all_dead_eh_edges (to_purge);
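/* For example (illustrative): if propagation of an inlined constant
   argument left a recorded statement such as  n_5 = __builtin_strlen ("ab");
   fold_stmt rewrites it to  n_5 = 2;  the loop above then updates the
   cgraph edges for the call that disappeared and, if the folded statement
   could previously throw, queues its block for EH edge purging.  */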
5441 /* Expand calls to inline functions in the body of FN. */
5443 unsigned int
5444 optimize_inline_calls (tree fn)
5446 copy_body_data id;
5447 basic_block bb;
5448 int last = n_basic_blocks_for_fn (cfun);
5449 bool inlined_p = false;
5451 /* Clear out ID. */
5452 memset (&id, 0, sizeof (id));
5454 id.src_node = id.dst_node = cgraph_node::get (fn);
5455 gcc_assert (id.dst_node->definition);
5456 id.dst_fn = fn;
5457 /* Or any functions that aren't finished yet. */
5458 if (current_function_decl)
5459 id.dst_fn = current_function_decl;
5461 id.copy_decl = copy_decl_maybe_to_var;
5462 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5463 id.transform_new_cfg = false;
5464 id.transform_return_to_modify = true;
5465 id.transform_parameter = true;
5466 id.transform_lang_insert_block = NULL;
5467 id.statements_to_fold = new hash_set<gimple *>;
5469 push_gimplify_context ();
5471 /* We make no attempts to keep dominance info up-to-date. */
5472 free_dominance_info (CDI_DOMINATORS);
5473 free_dominance_info (CDI_POST_DOMINATORS);
5475 /* Register specific gimple functions. */
5476 gimple_register_cfg_hooks ();
5478 /* Reach the trees by walking over the CFG, and note the
5479 enclosing basic-blocks in the call edges. */
5480 /* We walk the blocks going forward, because inlined function bodies
5481 will split id->current_basic_block, and the new blocks will
5482 follow it; we'll trudge through them, processing their CALL_EXPRs
5483 along the way. */
5484 auto_bitmap to_purge;
5485 FOR_EACH_BB_FN (bb, cfun)
5486 inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5488 pop_gimplify_context (NULL);
5490 if (flag_checking)
5492 struct cgraph_edge *e;
5494 id.dst_node->verify ();
5496 /* Double check that we inlined everything we are supposed to inline. */
5497 for (e = id.dst_node->callees; e; e = e->next_callee)
5498 gcc_assert (e->inline_failed);
5501 /* If we didn't inline into the function there is nothing to do. */
5502 if (!inlined_p)
5504 delete id.statements_to_fold;
5505 return 0;
5508 /* Fold queued statements. */
5509 update_max_bb_count ();
5510 fold_marked_statements (last, id.statements_to_fold);
5511 delete id.statements_to_fold;
5513 /* Finally purge EH and abnormal edges from the call stmts we inlined.
5514 We need to do this after fold_marked_statements since that may walk
5515 the SSA use-def chain. */
5516 unsigned i;
5517 bitmap_iterator bi;
5518 EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5520 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5521 if (bb)
5523 gimple_purge_dead_eh_edges (bb);
5524 gimple_purge_dead_abnormal_call_edges (bb);
5528 gcc_assert (!id.debug_stmts.exists ());
5530 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5531 number_blocks (fn);
5533 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5534 id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();
5536 if (flag_checking)
5537 id.dst_node->verify ();
5539 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5540 not possible yet - the IPA passes might make various functions non-throwing,
5541 and they don't bother to proactively update local EH info. This is done
5542 later in the fixup_cfg pass, which also executes the verification. */
5543 return (TODO_update_ssa
5544 | TODO_cleanup_cfg
5545 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5546 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5547 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5548 ? TODO_rebuild_frequencies : 0));
5551 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5553 tree
5554 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5556 enum tree_code code = TREE_CODE (*tp);
5557 enum tree_code_class cl = TREE_CODE_CLASS (code);
5559 /* We make copies of most nodes. */
5560 if (IS_EXPR_CODE_CLASS (cl)
5561 || code == TREE_LIST
5562 || code == TREE_VEC
5563 || code == TYPE_DECL
5564 || code == OMP_CLAUSE)
5566 /* Because the chain gets clobbered when we make a copy, we save it
5567 here. */
5568 tree chain = NULL_TREE, new_tree;
5570 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5571 chain = TREE_CHAIN (*tp);
5573 /* Copy the node. */
5574 new_tree = copy_node (*tp);
5576 *tp = new_tree;
5578 /* Now, restore the chain, if appropriate. That will cause
5579 walk_tree to walk into the chain as well. */
5580 if (code == PARM_DECL
5581 || code == TREE_LIST
5582 || code == OMP_CLAUSE)
5583 TREE_CHAIN (*tp) = chain;
5585 /* For now, we don't update BLOCKs when we make copies. So, we
5586 have to nullify all BIND_EXPRs. */
5587 if (TREE_CODE (*tp) == BIND_EXPR)
5588 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5590 else if (code == CONSTRUCTOR)
5592 /* CONSTRUCTOR nodes need special handling because
5593 we need to duplicate the vector of elements. */
5594 tree new_tree;
5596 new_tree = copy_node (*tp);
5597 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5598 *tp = new_tree;
5600 else if (code == STATEMENT_LIST)
5601 /* We used to just abort on STATEMENT_LIST, but we can run into them
5602 with statement-expressions (c++/40975). */
5603 copy_statement_list (tp);
5604 else if (TREE_CODE_CLASS (code) == tcc_type)
5605 *walk_subtrees = 0;
5606 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5607 *walk_subtrees = 0;
5608 else if (TREE_CODE_CLASS (code) == tcc_constant)
5609 *walk_subtrees = 0;
5610 return NULL_TREE;
5613 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5614 information indicating to what new SAVE_EXPR this one should be mapped,
5615 use that one. Otherwise, create a new node and enter it in ST. FN is
5616 the function into which the copy will be placed. */
5618 static void
5619 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5621 tree *n;
5622 tree t;
5624 /* See if we already encountered this SAVE_EXPR. */
5625 n = st->get (*tp);
5627 /* If we didn't already remap this SAVE_EXPR, do so now. */
5628 if (!n)
5630 t = copy_node (*tp);
5632 /* Remember this SAVE_EXPR. */
5633 st->put (*tp, t);
5634 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5635 st->put (t, t);
5637 else
5639 /* We've already walked into this SAVE_EXPR; don't do it again. */
5640 *walk_subtrees = 0;
5641 t = *n;
5644 /* Replace this SAVE_EXPR with the copy. */
5645 *tp = t;
5648 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5649 label, copies the declaration and enters it in the decl map in DATA (which
5650 is really a 'copy_body_data *'). */
5652 static tree
5653 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5654 bool *handled_ops_p ATTRIBUTE_UNUSED,
5655 struct walk_stmt_info *wi)
5657 copy_body_data *id = (copy_body_data *) wi->info;
5658 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5660 if (stmt)
5662 tree decl = gimple_label_label (stmt);
5664 /* Copy the decl and remember the copy. */
5665 insert_decl_map (id, decl, id->copy_decl (decl, id));
5668 return NULL_TREE;
5671 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5672 struct walk_stmt_info *wi);
5674 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5675 Using the decl map pointed to by ST (really a hash_map), remaps all
5676 local declarations to appropriate replacements in gimple
5677 operands. */
5679 static tree
5680 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5682 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5683 copy_body_data *id = (copy_body_data *) wi->info;
5684 hash_map<tree, tree> *st = id->decl_map;
5685 tree *n;
5686 tree expr = *tp;
5688 /* For recursive invocations this is no longer the LHS itself. */
5689 bool is_lhs = wi->is_lhs;
5690 wi->is_lhs = false;
5692 if (TREE_CODE (expr) == SSA_NAME)
5694 *tp = remap_ssa_name (*tp, id);
5695 *walk_subtrees = 0;
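/* If this name was the left-hand side, the remapped name is now defined
   by the statement currently being processed, so point its defining
   statement there. */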
5696 if (is_lhs)
5697 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5699 /* Only a local declaration (variable or label). */
5700 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5701 || TREE_CODE (expr) == LABEL_DECL)
5703 /* Lookup the declaration. */
5704 n = st->get (expr);
5706 /* If it's there, remap it. */
5707 if (n)
5708 *tp = *n;
5709 *walk_subtrees = 0;
5711 else if (TREE_CODE (expr) == STATEMENT_LIST
5712 || TREE_CODE (expr) == BIND_EXPR
5713 || TREE_CODE (expr) == SAVE_EXPR)
5714 gcc_unreachable ();
5715 else if (TREE_CODE (expr) == TARGET_EXPR)
5717 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5718 It's OK for this to happen if it was part of a subtree that
5719 isn't immediately expanded, such as operand 2 of another
5720 TARGET_EXPR. */
5721 if (!TREE_OPERAND (expr, 1))
5723 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5724 TREE_OPERAND (expr, 3) = NULL_TREE;
5727 else if (TREE_CODE (expr) == OMP_CLAUSE)
5729 /* Before the omplower pass completes, some OMP clauses can contain
5730 sequences that are neither copied by gimple_seq_copy nor walked by
5731 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5732 in those situations, we have to copy and process them explicitly. */
5734 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5736 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5737 seq = duplicate_remap_omp_clause_seq (seq, wi);
5738 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5740 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5742 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5743 seq = duplicate_remap_omp_clause_seq (seq, wi);
5744 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5746 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5748 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5749 seq = duplicate_remap_omp_clause_seq (seq, wi);
5750 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5751 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5752 seq = duplicate_remap_omp_clause_seq (seq, wi);
5753 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5757 /* Keep iterating. */
5758 return NULL_TREE;
5762 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5763 Using the decl map pointed to by ST (really a hash_map), remaps all
5764 local declarations to appropriate replacements in gimple
5765 statements. */
5767 static tree
5768 replace_locals_stmt (gimple_stmt_iterator *gsip,
5769 bool *handled_ops_p ATTRIBUTE_UNUSED,
5770 struct walk_stmt_info *wi)
5772 copy_body_data *id = (copy_body_data *) wi->info;
5773 gimple *gs = gsi_stmt (*gsip);
5775 if (gbind *stmt = dyn_cast <gbind *> (gs))
5777 tree block = gimple_bind_block (stmt);
5779 if (block)
5781 remap_block (&block, id);
5782 gimple_bind_set_block (stmt, block);
5785 /* This will remap a lot of the same decls again, but this should be
5786 harmless. */
5787 if (gimple_bind_vars (stmt))
5789 tree old_var, decls = gimple_bind_vars (stmt);
5791 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5792 if (!can_be_nonlocal (old_var, id)
5793 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5794 remap_decl (old_var, id);
5796 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5797 id->prevent_decl_creation_for_types = true;
5798 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5799 id->prevent_decl_creation_for_types = false;
5803 /* Keep iterating. */
5804 return NULL_TREE;
5807 /* Create a copy of SEQ and remap all decls in it. */
5809 static gimple_seq
5810 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5812 if (!seq)
5813 return NULL;
5815 /* Any labels in OMP sequences can only be referred to from within the
5816 sequence itself, so we can do both the label marking and the remapping here. */
5817 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5818 gimple_seq copy = gimple_seq_copy (seq);
5819 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5820 return copy;
5823 /* Copies everything in SEQ and replaces variables and labels local to
5824 current_function_decl. */
5826 gimple_seq
5827 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5829 copy_body_data id;
5830 struct walk_stmt_info wi;
5831 gimple_seq copy;
5833 /* There's nothing to do for NULL_TREE. */
5834 if (seq == NULL)
5835 return seq;
5837 /* Set up ID. */
5838 memset (&id, 0, sizeof (id));
5839 id.src_fn = current_function_decl;
5840 id.dst_fn = current_function_decl;
5841 id.src_cfun = cfun;
5842 id.decl_map = new hash_map<tree, tree>;
5843 id.debug_map = NULL;
5845 id.copy_decl = copy_decl_no_change;
5846 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5847 id.transform_new_cfg = false;
5848 id.transform_return_to_modify = false;
5849 id.transform_parameter = false;
5850 id.transform_lang_insert_block = NULL;
5852 /* Walk the tree once to find local labels. */
5853 memset (&wi, 0, sizeof (wi));
5854 hash_set<tree> visited;
5855 wi.info = &id;
5856 wi.pset = &visited;
5857 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5859 copy = gimple_seq_copy (seq);
5861 /* Walk the copy, remapping decls. */
5862 memset (&wi, 0, sizeof (wi));
5863 wi.info = &id;
5864 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5866 /* Clean up. */
5867 delete id.decl_map;
5868 if (id.debug_map)
5869 delete id.debug_map;
5870 if (id.dependence_map)
5872 delete id.dependence_map;
5873 id.dependence_map = NULL;
5876 return copy;
5880 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5882 static tree
5883 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5885 if (*tp == data)
5886 return (tree) data;
5887 else
5888 return NULL;
5891 DEBUG_FUNCTION bool
5892 debug_find_tree (tree top, tree search)
5894 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5898 /* Declare the variables created by the inliner. Add all the variables in
5899 VARS to the given BLOCK and register them as local decls of cfun. */
5901 static void
5902 declare_inline_vars (tree block, tree vars)
5904 tree t;
5905 for (t = vars; t; t = DECL_CHAIN (t))
5907 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5908 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5909 add_local_decl (cfun, t);
5912 if (block)
5913 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5916 /* Finish up the copy COPY of DECL. The original DECL was in ID->src_fn;
5917 the copy will live in ID->dst_fn. Sets up debug-related flags, RTL, mode
5918 and the context of the copy. */
5920 tree
5921 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5923 /* Don't generate debug information for the copy if we wouldn't have
5924 generated it for the original either. */
5925 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5926 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5928 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5929 declaration inspired this copy. */
5930 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5932 /* The new variable/label has no RTL, yet. */
5933 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5934 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5935 SET_DECL_RTL (copy, 0);
5936 /* For vector typed decls make sure to update DECL_MODE according
5937 to the new function context. */
5938 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5939 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5941 /* These args would always appear unused, if not for this. */
5942 TREE_USED (copy) = 1;
5944 /* Set the context for the new declaration. */
5945 if (!DECL_CONTEXT (decl))
5946 /* Globals stay global. */
5948 else if (DECL_CONTEXT (decl) != id->src_fn)
5949 /* Things that weren't in the scope of the function we're inlining
5950 from aren't in the scope we're inlining to, either. */
5952 else if (TREE_STATIC (decl))
5953 /* Function-scoped static variables should stay in the original
5954 function. */
5956 else
5958 /* Ordinary automatic local variables are now in the scope of the
5959 new function. */
5960 DECL_CONTEXT (copy) = id->dst_fn;
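/* When copying into an SIMT region (dst_simt_vars is set), non-register
   variables are marked "omp simt private" and recorded for later SIMT
   privatization. */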
5961 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5963 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5964 DECL_ATTRIBUTES (copy)
5965 = tree_cons (get_identifier ("omp simt private"), NULL,
5966 DECL_ATTRIBUTES (copy));
5967 id->dst_simt_vars->safe_push (copy);
5971 return copy;
5974 /* Create a new VAR_DECL that is identical in all respects to DECL, except
5975 that it is a VAR_DECL rather than a PARM_DECL or RESULT_DECL. The original
5976 DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
5978 tree
5979 copy_decl_to_var (tree decl, copy_body_data *id)
5981 tree copy, type;
5983 gcc_assert (TREE_CODE (decl) == PARM_DECL
5984 || TREE_CODE (decl) == RESULT_DECL);
5986 type = TREE_TYPE (decl);
5988 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5989 VAR_DECL, DECL_NAME (decl), type);
5990 if (DECL_PT_UID_SET_P (decl))
5991 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5992 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5993 TREE_READONLY (copy) = TREE_READONLY (decl);
5994 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5995 DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
5996 DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
5998 return copy_decl_for_dup_finish (id, decl, copy);
6001 /* Like copy_decl_to_var, but create a return slot object instead of a
6002 pointer variable for return by invisible reference. */
6004 static tree
6005 copy_result_decl_to_var (tree decl, copy_body_data *id)
6007 tree copy, type;
6009 gcc_assert (TREE_CODE (decl) == PARM_DECL
6010 || TREE_CODE (decl) == RESULT_DECL);
6012 type = TREE_TYPE (decl);
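/* For a result returned by invisible reference DECL has pointer type;
   create the replacement variable with the pointed-to type so it can act
   as the actual return slot. */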
6013 if (DECL_BY_REFERENCE (decl))
6014 type = TREE_TYPE (type);
6016 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
6017 VAR_DECL, DECL_NAME (decl), type);
6018 if (DECL_PT_UID_SET_P (decl))
6019 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
6020 TREE_READONLY (copy) = TREE_READONLY (decl);
6021 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
6022 if (!DECL_BY_REFERENCE (decl))
6024 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
6025 DECL_NOT_GIMPLE_REG_P (copy)
6026 = (DECL_NOT_GIMPLE_REG_P (decl)
6027 /* RESULT_DECLs are treated specially by needs_to_live_in_memory;
6028 mirror that to the created VAR_DECL. */
6029 || (TREE_CODE (decl) == RESULT_DECL
6030 && aggregate_value_p (decl, id->src_fn)));
6033 return copy_decl_for_dup_finish (id, decl, copy);
6036 tree
6037 copy_decl_no_change (tree decl, copy_body_data *id)
6039 tree copy;
6041 copy = copy_node (decl);
6043 /* The COPY is not abstract; it will be generated in DST_FN. */
6044 DECL_ABSTRACT_P (copy) = false;
6045 lang_hooks.dup_lang_specific_decl (copy);
6047 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
6048 been taken; it's for internal bookkeeping in expand_goto_internal. */
6049 if (TREE_CODE (copy) == LABEL_DECL)
6051 TREE_ADDRESSABLE (copy) = 0;
6052 LABEL_DECL_UID (copy) = -1;
6055 return copy_decl_for_dup_finish (id, decl, copy);
6058 static tree
6059 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
6061 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
6062 return copy_decl_to_var (decl, id);
6063 else
6064 return copy_decl_no_change (decl, id);
6067 /* Return a copy of the function's argument tree without any modifications. */
6069 static tree
6070 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
6072 tree arg, *parg;
6073 tree new_parm = NULL;
6075 parg = &new_parm;
6076 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
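/* Remap the parameter; if the mapping yields something other than a
   PARM_DECL (e.g. a VAR_DECL substituted for it), make a fresh copy of
   the original so the new argument list stays a chain of PARM_DECLs. */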
6078 tree new_tree = remap_decl (arg, id);
6079 if (TREE_CODE (new_tree) != PARM_DECL)
6080 new_tree = id->copy_decl (arg, id);
6081 lang_hooks.dup_lang_specific_decl (new_tree);
6082 *parg = new_tree;
6083 parg = &DECL_CHAIN (new_tree);
6085 return new_parm;
6088 /* Return a copy of the function's static chain. */
6089 static tree
6090 copy_static_chain (tree static_chain, copy_body_data * id)
6092 tree *chain_copy, *pvar;
6094 chain_copy = &static_chain;
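/* Remap each decl on the static chain in place, re-linking it to the
   not-yet-remapped remainder of the chain. */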
6095 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
6097 tree new_tree = remap_decl (*pvar, id);
6098 lang_hooks.dup_lang_specific_decl (new_tree);
6099 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
6100 *pvar = new_tree;
6102 return static_chain;
6105 /* Return true if the function is allowed to be versioned.
6106 This is a guard for the versioning functionality. */
6108 bool
6109 tree_versionable_function_p (tree fndecl)
6111 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
6112 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
6115 /* Update clone info after duplication. */
6117 static void
6118 update_clone_info (copy_body_data * id)
6120 clone_info *dst_info = clone_info::get (id->dst_node);
6121 vec<ipa_param_performed_split, va_gc> *cur_performed_splits
6122 = dst_info ? dst_info->performed_splits : NULL;
6123 if (cur_performed_splits)
6125 unsigned len = cur_performed_splits->length ();
6126 for (unsigned i = 0; i < len; i++)
6128 ipa_param_performed_split *ps = &(*cur_performed_splits)[i];
6129 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6133 struct cgraph_node *node;
6134 if (!id->dst_node->clones)
6135 return;
6136 for (node = id->dst_node->clones; node != id->dst_node;)
6138 /* First update replace maps to match the new body. */
6139 clone_info *info = clone_info::get (node);
6140 if (info && info->tree_map)
6142 unsigned int i;
6143 for (i = 0; i < vec_safe_length (info->tree_map); i++)
6145 struct ipa_replace_map *replace_info;
6146 replace_info = (*info->tree_map)[i];
6147 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6150 if (info && info->performed_splits)
6152 unsigned len = vec_safe_length (info->performed_splits);
6153 for (unsigned i = 0; i < len; i++)
6155 ipa_param_performed_split *ps
6156 = &(*info->performed_splits)[i];
6157 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6160 if (unsigned len = vec_safe_length (cur_performed_splits))
6162 /* We do not want to add the current performed splits when we are saving
6163 a copy of the function body for later use during inlining; that would
6164 just duplicate all entries. So check whether anything referring to the
6165 first dummy_decl is already present. */
6166 if (!info)
6167 info = clone_info::get_create (node);
6168 unsigned dst_len = vec_safe_length (info->performed_splits);
6169 ipa_param_performed_split *first = &(*cur_performed_splits)[0];
6170 for (unsigned i = 0; i < dst_len; i++)
6171 if ((*info->performed_splits)[i].dummy_decl
6172 == first->dummy_decl)
6174 len = 0;
6175 break;
6178 for (unsigned i = 0; i < len; i++)
6179 vec_safe_push (info->performed_splits,
6180 (*cur_performed_splits)[i]);
6181 if (flag_checking)
6183 for (unsigned i = 0; i < dst_len; i++)
6185 ipa_param_performed_split *ps1
6186 = &(*info->performed_splits)[i];
6187 for (unsigned j = i + 1; j < dst_len; j++)
6189 ipa_param_performed_split *ps2
6190 = &(*info->performed_splits)[j];
6191 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
6192 || ps1->unit_offset != ps2->unit_offset);
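/* Step to the next node in a depth-first walk of the clone tree rooted
   at id->dst_node. */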
6198 if (node->clones)
6199 node = node->clones;
6200 else if (node->next_sibling_clone)
6201 node = node->next_sibling_clone;
6202 else
6204 while (node != id->dst_node && !node->next_sibling_clone)
6205 node = node->clone_of;
6206 if (node != id->dst_node)
6207 node = node->next_sibling_clone;
6212 /* Create a copy of a function's tree.
6213 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6214 of the original function and the new copied function
6215 respectively. In case we want to replace a DECL
6216 tree with another tree while duplicating the function's
6217 body, TREE_MAP represents the mapping between these
6218 trees. If UPDATE_CLONES is set, the call_stmt fields
6219 of edges of clones of the function will be updated.
6221 If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype (i.e.
6222 the function parameters and return value) should be modified.
6223 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
6224 If non-NULL, NEW_ENTRY determines the new entry BB of the clone. */
6226 void
6227 tree_function_versioning (tree old_decl, tree new_decl,
6228 vec<ipa_replace_map *, va_gc> *tree_map,
6229 ipa_param_adjustments *param_adjustments,
6230 bool update_clones, bitmap blocks_to_copy,
6231 basic_block new_entry)
6233 struct cgraph_node *old_version_node;
6234 struct cgraph_node *new_version_node;
6235 copy_body_data id;
6236 tree p;
6237 unsigned i;
6238 struct ipa_replace_map *replace_info;
6239 basic_block old_entry_block, bb;
6240 auto_vec<gimple *, 10> init_stmts;
6241 tree vars = NULL_TREE;
6243 /* We can get called recursively from expand_call_inline via clone
6244 materialization. While expand_call_inline maintains input_location,
6245 we must not let it leak into the materialized clone. */
6246 location_t saved_location = input_location;
6247 input_location = UNKNOWN_LOCATION;
6249 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6250 && TREE_CODE (new_decl) == FUNCTION_DECL);
6251 DECL_POSSIBLY_INLINED (old_decl) = 1;
6253 old_version_node = cgraph_node::get (old_decl);
6254 gcc_checking_assert (old_version_node);
6255 new_version_node = cgraph_node::get (new_decl);
6256 gcc_checking_assert (new_version_node);
6258 /* Copy over debug args. */
6259 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6261 vec<tree, va_gc> **new_debug_args, **old_debug_args;
6262 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6263 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6264 old_debug_args = decl_debug_args_lookup (old_decl);
6265 if (old_debug_args)
6267 new_debug_args = decl_debug_args_insert (new_decl);
6268 *new_debug_args = vec_safe_copy (*old_debug_args);
6272 /* Output the inlining info for this abstract function, since it has been
6273 inlined. If we don't do this now, we can lose the information about the
6274 variables in the function when the blocks get blown away as soon as we
6275 remove the cgraph node. */
6276 (*debug_hooks->outlining_inline_function) (old_decl);
6278 DECL_ARTIFICIAL (new_decl) = 1;
6279 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6280 if (DECL_ORIGIN (old_decl) == old_decl)
6281 old_version_node->used_as_abstract_origin = true;
6282 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6284 /* Prepare the data structures for the tree copy. */
6285 memset (&id, 0, sizeof (id));
6287 /* Generate a new name for the new version. */
6288 id.statements_to_fold = new hash_set<gimple *>;
6290 id.decl_map = new hash_map<tree, tree>;
6291 id.debug_map = NULL;
6292 id.src_fn = old_decl;
6293 id.dst_fn = new_decl;
6294 id.src_node = old_version_node;
6295 id.dst_node = new_version_node;
6296 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6297 id.blocks_to_copy = blocks_to_copy;
6299 id.copy_decl = copy_decl_no_change;
6300 id.transform_call_graph_edges
6301 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6302 id.transform_new_cfg = true;
6303 id.transform_return_to_modify = false;
6304 id.transform_parameter = false;
6305 id.transform_lang_insert_block = NULL;
6307 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
6308 (DECL_STRUCT_FUNCTION (old_decl));
6309 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6310 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6311 initialize_cfun (new_decl, old_decl,
6312 new_entry ? new_entry->count : old_entry_block->count);
6313 new_version_node->calls_declare_variant_alt
6314 = old_version_node->calls_declare_variant_alt;
6315 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6316 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6317 = id.src_cfun->gimple_df->ipa_pta;
6319 /* Copy the function's static chain. */
6320 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6321 if (p)
6322 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6323 = copy_static_chain (p, &id);
6325 auto_vec<int, 16> new_param_indices;
6326 clone_info *info = clone_info::get (old_version_node);
6327 ipa_param_adjustments *old_param_adjustments
6328 = info ? info->param_adjustments : NULL;
6329 if (old_param_adjustments)
6330 old_param_adjustments->get_updated_indices (&new_param_indices);
6332 /* If there's a tree_map, prepare for substitution. */
6333 if (tree_map)
6334 for (i = 0; i < tree_map->length (); i++)
6336 gimple *init;
6337 replace_info = (*tree_map)[i];
6339 int p = replace_info->parm_num;
6340 if (old_param_adjustments)
6341 p = new_param_indices[p];
6343 tree parm;
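/* Walk the original argument chain to the PARM_DECL with index P. */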
6344 for (parm = DECL_ARGUMENTS (old_decl); p;
6345 parm = DECL_CHAIN (parm))
6346 p--;
6347 gcc_assert (parm);
6348 init = setup_one_parameter (&id, parm, replace_info->new_tree,
6349 id.src_fn, NULL, &vars);
6350 if (init)
6351 init_stmts.safe_push (init);
6354 ipa_param_body_adjustments *param_body_adjs = NULL;
6355 if (param_adjustments)
6357 param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6358 new_decl, old_decl,
6359 &id, &vars, tree_map);
6360 id.param_body_adjs = param_body_adjs;
6361 DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6363 else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6364 DECL_ARGUMENTS (new_decl)
6365 = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6367 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6368 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6370 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6372 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6373 /* Add local vars. */
6374 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6376 if (DECL_RESULT (old_decl) == NULL_TREE)
6378 else if (param_adjustments && param_adjustments->m_skip_return
6379 && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6381 tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6382 &id);
6383 declare_inline_vars (NULL, resdecl_repl);
6384 insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
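/* The clone does not return the value; give it a void RESULT_DECL, drop
   DECL_IS_MALLOC and clear the aggregate-return flags. */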
6386 DECL_RESULT (new_decl)
6387 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6388 RESULT_DECL, NULL_TREE, void_type_node);
6389 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6390 DECL_IS_MALLOC (new_decl) = false;
6391 cfun->returns_struct = 0;
6392 cfun->returns_pcc_struct = 0;
6394 else
6396 tree old_name;
6397 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6398 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6399 if (gimple_in_ssa_p (id.src_cfun)
6400 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6401 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6403 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6404 insert_decl_map (&id, old_name, new_name);
6405 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6406 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6410 /* Set up the destination function's loop tree. */
6411 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6413 cfun->curr_properties &= ~PROP_loops;
6414 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6415 cfun->curr_properties |= PROP_loops;
6418 /* Copy the Function's body. */
6419 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6420 new_entry);
6422 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6423 number_blocks (new_decl);
6425 /* We want to create the BB unconditionally, so that the addition of
6426 debug stmts doesn't affect BB count, which may in the end cause
6427 codegen differences. */
6428 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6429 while (init_stmts.length ())
6430 insert_init_stmt (&id, bb, init_stmts.pop ());
6431 update_clone_info (&id);
6433 /* Remap the nonlocal_goto_save_area, if any. */
6434 if (cfun->nonlocal_goto_save_area)
6436 struct walk_stmt_info wi;
6438 memset (&wi, 0, sizeof (wi));
6439 wi.info = &id;
6440 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6443 /* Clean up. */
6444 delete id.decl_map;
6445 if (id.debug_map)
6446 delete id.debug_map;
6447 free_dominance_info (CDI_DOMINATORS);
6448 free_dominance_info (CDI_POST_DOMINATORS);
6450 update_max_bb_count ();
6451 fold_marked_statements (0, id.statements_to_fold);
6452 delete id.statements_to_fold;
6453 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6454 if (id.dst_node->definition)
6455 cgraph_edge::rebuild_references ();
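/* If the copying or the removal of unreachable blocks left the loop tree
   in need of fixing, recompute dominators and repair it before updating
   SSA form. */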
6456 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6458 calculate_dominance_info (CDI_DOMINATORS);
6459 fix_loop_structure (NULL);
6461 update_ssa (TODO_update_ssa);
6463 /* After partial cloning we need to rescale frequencies, so they are
6464 within proper range in the cloned function. */
6465 if (new_entry)
6467 struct cgraph_edge *e;
6468 rebuild_frequencies ();
6470 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6471 for (e = new_version_node->callees; e; e = e->next_callee)
6473 basic_block bb = gimple_bb (e->call_stmt);
6474 e->count = bb->count;
6476 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6478 basic_block bb = gimple_bb (e->call_stmt);
6479 e->count = bb->count;
6483 if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6485 vec<tree, va_gc> **debug_args = NULL;
6486 unsigned int len = 0;
6487 unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6489 for (i = 0; i < reset_len; i++)
6491 tree parm = param_body_adjs->m_reset_debug_decls[i];
6492 gcc_assert (is_gimple_reg (parm));
6493 tree ddecl;
6495 if (debug_args == NULL)
6497 debug_args = decl_debug_args_insert (new_decl);
6498 len = vec_safe_length (*debug_args);
6500 ddecl = make_node (DEBUG_EXPR_DECL);
6501 DECL_ARTIFICIAL (ddecl) = 1;
6502 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6503 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6504 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6505 vec_safe_push (*debug_args, ddecl);
6507 if (debug_args != NULL)
6509 /* On the callee side, add
6510 DEBUG D#Y s=> parm
6511 DEBUG var => D#Y
6512 stmts to the first bb where var is a VAR_DECL created for the
6513 optimized away parameter in DECL_INITIAL block. This hints
6514 in the debug info that var (whose DECL_ORIGIN is the parm
6515 PARM_DECL) is optimized away, but could be looked up at the
6516 call site as value of D#X there. */
6517 tree vexpr;
6518 gimple_stmt_iterator cgsi
6519 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6520 gimple *def_temp;
6521 tree var = vars;
6522 i = vec_safe_length (*debug_args);
6525 i -= 2;
6526 while (var != NULL_TREE
6527 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6528 var = TREE_CHAIN (var);
6529 if (var == NULL_TREE)
6530 break;
6531 vexpr = make_node (DEBUG_EXPR_DECL);
6532 tree parm = (**debug_args)[i];
6533 DECL_ARTIFICIAL (vexpr) = 1;
6534 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6535 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6536 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6537 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6538 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6539 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6541 while (i > len);
6544 delete param_body_adjs;
6545 free_dominance_info (CDI_DOMINATORS);
6546 free_dominance_info (CDI_POST_DOMINATORS);
6548 gcc_assert (!id.debug_stmts.exists ());
6549 pop_cfun ();
6550 input_location = saved_location;
6551 return;
6554 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6555 the callee and return the inlined body on success. */
6557 tree
6558 maybe_inline_call_in_expr (tree exp)
6560 tree fn = get_callee_fndecl (exp);
6562 /* We can only try to inline "const" functions. */
6563 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6565 call_expr_arg_iterator iter;
6566 copy_body_data id;
6567 tree param, arg, t;
6568 hash_map<tree, tree> decl_map;
6570 /* Remap the parameters. */
6571 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6572 param;
6573 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6574 decl_map.put (param, arg);
6576 memset (&id, 0, sizeof (id));
6577 id.src_fn = fn;
6578 id.dst_fn = current_function_decl;
6579 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6580 id.decl_map = &decl_map;
6582 id.copy_decl = copy_decl_no_change;
6583 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6584 id.transform_new_cfg = false;
6585 id.transform_return_to_modify = true;
6586 id.transform_parameter = true;
6587 id.transform_lang_insert_block = NULL;
6589 /* Make sure not to unshare trees behind the front-end's back
6590 since front-end specific mechanisms may rely on sharing. */
6591 id.regimplify = false;
6592 id.do_not_unshare = true;
6594 /* We're not inside any EH region. */
6595 id.eh_lp_nr = 0;
6597 t = copy_tree_body (&id);
6599 /* We can only return something suitable for use in a GENERIC
6600 expression tree. */
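/* If the inlined body reduced to a single assignment to the return
   value, the right-hand side of that assignment is the value of the
   call. */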
6601 if (TREE_CODE (t) == MODIFY_EXPR)
6602 return TREE_OPERAND (t, 1);
6605 return NULL_TREE;
6608 /* Duplicate a type, fields and all. */
6610 tree
6611 build_duplicate_type (tree type)
6613 struct copy_body_data id;
6615 memset (&id, 0, sizeof (id));
6616 id.src_fn = current_function_decl;
6617 id.dst_fn = current_function_decl;
6618 id.src_cfun = cfun;
6619 id.decl_map = new hash_map<tree, tree>;
6620 id.debug_map = NULL;
6621 id.copy_decl = copy_decl_no_change;
6623 type = remap_type_1 (type, &id);
6625 delete id.decl_map;
6626 if (id.debug_map)
6627 delete id.debug_map;
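/* The duplicate is a distinct type; make it its own canonical type. */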
6629 TYPE_CANONICAL (type) = type;
6631 return type;
6634 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6635 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6636 evaluation. */
6638 tree
6639 copy_fn (tree fn, tree& parms, tree& result)
6641 copy_body_data id;
6642 tree param;
6643 hash_map<tree, tree> decl_map;
6645 tree *p = &parms;
6646 *p = NULL_TREE;
6648 memset (&id, 0, sizeof (id));
6649 id.src_fn = fn;
6650 id.dst_fn = current_function_decl;
6651 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6652 id.decl_map = &decl_map;
6654 id.copy_decl = copy_decl_no_change;
6655 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6656 id.transform_new_cfg = false;
6657 id.transform_return_to_modify = false;
6658 id.transform_parameter = true;
6659 id.transform_lang_insert_block = NULL;
6661 /* Make sure not to unshare trees behind the front-end's back
6662 since front-end specific mechanisms may rely on sharing. */
6663 id.regimplify = false;
6664 id.do_not_unshare = true;
6665 id.do_not_fold = true;
6667 /* We're not inside any EH region. */
6668 id.eh_lp_nr = 0;
6670 /* Remap the parameters and result and return them to the caller. */
6671 for (param = DECL_ARGUMENTS (fn);
6672 param;
6673 param = DECL_CHAIN (param))
6675 *p = remap_decl (param, &id);
6676 p = &DECL_CHAIN (*p);
6679 if (DECL_RESULT (fn))
6680 result = remap_decl (DECL_RESULT (fn), &id);
6681 else
6682 result = NULL_TREE;
6684 return copy_tree_body (&id);