Fix omp target issue with Fortran optional arguments
[official-gcc.git] / gcc / tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63 #include "tree-cfgcleanup.h"
64 #include "tree-ssa-live.h"
66 /* I'm not really happy about this, but we need to handle gimple and
67 non-gimple trees. */
69 /* Inlining, Cloning, Versioning, Parallelization
71 Inlining: a function body is duplicated, but the PARM_DECLs are
72 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
73 MODIFY_EXPRs that store to a dedicated returned-value variable.
74 The duplicated eh_region info of the copy will later be appended
75 to the info for the caller; the eh_region info in copied throwing
76 statements and RESX statements are adjusted accordingly.
78 Cloning: (only in C++) We have one body for a con/de/structor, and
79 multiple function decls, each with a unique parameter list.
80 Duplicate the body, using the given splay tree; some parameters
81 will become constants (like 0 or 1).
83 Versioning: a function body is duplicated and the result is a new
84 function rather than into blocks of an existing function as with
85 inlining. Some parameters will become constants.
87 Parallelization: a region of a function is duplicated resulting in
88 a new function. Variables may be replaced with complex expressions
89 to enable shared variable semantics.
91 All of these will simultaneously look up any callgraph edges. If
92 we're going to inline the duplicated function body, and the given
93 function has some cloned callgraph nodes (one for each place this
94 function will be inlined) those callgraph edges will be duplicated.
95 If we're cloning the body, those callgraph edges will be
96 updated to point into the new body. (Note that the original
97 callgraph node and edge list will not be altered.)
99 See the CALL_EXPR handling case in copy_tree_body_r (). */
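/* Illustration (a sketch for this file's readers, not part of the
   compiler's code paths; all names below are hypothetical): given

     int callee (int a) { return a + 1; }

   inlining the body at a call site "res = callee (x);" conceptually
   produces

     a.1 = x;              <- PARM_DECL "a" remapped to VAR_DECL "a.1"
     retval.2 = a.1 + 1;   <- RETURN_EXPR became a MODIFY_EXPR
     res = retval.2;       <- store through the returned-value variable

   with the branch semantics of the return handled by the CFG rather
   than by statements.  */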
101 /* To Do:
103 o In order to make inlining-on-trees work, we pessimized
104 function-local static constants. In particular, they are now
105 always output, even when not addressed. Fix this by treating
106 function-local static constants just like global static
107 constants; the back-end already knows not to output them if they
108 are not needed.
110 o Provide heuristics to clamp inlining of recursive template
111 calls? */
114 /* Weights that estimate_num_insns uses to estimate the size of the
115 produced code. */
117 eni_weights eni_size_weights;
119 /* Weights that estimate_num_insns uses to estimate the time necessary
120 to execute the produced code. */
122 eni_weights eni_time_weights;
124 /* Prototypes. */
126 static tree declare_return_variable (copy_body_data *, tree, tree,
127 basic_block);
128 static void remap_block (tree *, copy_body_data *);
129 static void copy_bind_expr (tree *, int *, copy_body_data *);
130 static void declare_inline_vars (tree, tree);
131 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
132 static void prepend_lexical_block (tree current_block, tree new_block);
133 static tree copy_result_decl_to_var (tree, copy_body_data *);
134 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
135 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
136 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
138 /* Insert a tree->tree mapping for ID. Although the name suggests
139 that the trees should be variables, it is used for more than that. */
141 void
142 insert_decl_map (copy_body_data *id, tree key, tree value)
144 id->decl_map->put (key, value);
146 /* Always insert an identity map as well. If we see this same new
147 node again, we won't want to duplicate it a second time. */
148 if (key != value)
149 id->decl_map->put (value, value);
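/* Usage sketch (hypothetical decls): after

     insert_decl_map (id, old_var, new_var);

   both lookups resolve without creating further copies:

     *id->decl_map->get (old_var) == new_var
     *id->decl_map->get (new_var) == new_var   <- the identity entry

   so walking an already-remapped tree a second time maps each node to
   itself instead of duplicating it again.  */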
152 /* Insert a tree->tree mapping for ID. This is only used for
153 variables. */
155 static void
156 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
158 if (!gimple_in_ssa_p (id->src_cfun))
159 return;
161 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
162 return;
164 if (!target_for_debug_bind (key))
165 return;
167 gcc_assert (TREE_CODE (key) == PARM_DECL);
168 gcc_assert (VAR_P (value));
170 if (!id->debug_map)
171 id->debug_map = new hash_map<tree, tree>;
173 id->debug_map->put (key, value);
176 /* If nonzero, we're remapping the contents of inlined debug
177 statements. If negative, an error has occurred, such as a
178 reference to a variable that isn't available in the inlined
179 context. */
180 static int processing_debug_stmt = 0;
182 /* Construct new SSA name for old NAME. ID is the inline context. */
184 static tree
185 remap_ssa_name (tree name, copy_body_data *id)
187 tree new_tree, var;
188 tree *n;
190 gcc_assert (TREE_CODE (name) == SSA_NAME);
192 n = id->decl_map->get (name);
193 if (n)
195 /* When we perform edge redirection as part of CFG copy, IPA-SRA can
196 remove an unused LHS from a call statement. Such an LHS can, however,
197 still appear in debug statements, but its value is lost in this
198 function and we do not want to map it. */
199 if (id->killed_new_ssa_names
200 && id->killed_new_ssa_names->contains (*n))
202 gcc_assert (processing_debug_stmt);
203 processing_debug_stmt = -1;
204 return name;
207 return unshare_expr (*n);
210 if (processing_debug_stmt)
212 if (SSA_NAME_IS_DEFAULT_DEF (name)
213 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
214 && id->entry_bb == NULL
215 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
217 tree vexpr = make_node (DEBUG_EXPR_DECL);
218 gimple *def_temp;
219 gimple_stmt_iterator gsi;
220 tree val = SSA_NAME_VAR (name);
222 n = id->decl_map->get (val);
223 if (n != NULL)
224 val = *n;
225 if (TREE_CODE (val) != PARM_DECL
226 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
228 processing_debug_stmt = -1;
229 return name;
231 n = id->decl_map->get (val);
232 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
233 return *n;
234 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
235 DECL_ARTIFICIAL (vexpr) = 1;
236 TREE_TYPE (vexpr) = TREE_TYPE (name);
237 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
238 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
239 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
240 insert_decl_map (id, val, vexpr);
241 return vexpr;
244 processing_debug_stmt = -1;
245 return name;
248 /* Remap anonymous SSA names or SSA names of anonymous decls. */
249 var = SSA_NAME_VAR (name);
250 if (!var
251 || (!SSA_NAME_IS_DEFAULT_DEF (name)
252 && VAR_P (var)
253 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
254 && DECL_ARTIFICIAL (var)
255 && DECL_IGNORED_P (var)
256 && !DECL_NAME (var)))
258 struct ptr_info_def *pi;
259 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
260 if (!var && SSA_NAME_IDENTIFIER (name))
261 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
262 insert_decl_map (id, name, new_tree);
263 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
264 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
265 /* At least IPA points-to info can be directly transferred. */
266 if (id->src_cfun->gimple_df
267 && id->src_cfun->gimple_df->ipa_pta
268 && POINTER_TYPE_P (TREE_TYPE (name))
269 && (pi = SSA_NAME_PTR_INFO (name))
270 && !pi->pt.anything)
272 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
273 new_pi->pt = pi->pt;
275 /* So can range-info. */
276 if (!POINTER_TYPE_P (TREE_TYPE (name))
277 && SSA_NAME_RANGE_INFO (name))
278 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
279 SSA_NAME_RANGE_INFO (name));
280 return new_tree;
283 /* Do not set DEF_STMT yet as the statement is not copied yet. We do
284 that in copy_bb. */
285 new_tree = remap_decl (var, id);
287 /* We might've substituted a constant or another SSA_NAME for
288 the variable.
290 Replace the SSA name representing the RESULT_DECL by the variable
291 during inlining: this saves us from needing to introduce a PHI node
292 in the case where the return value is only partly initialized. */
293 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
294 && (!SSA_NAME_VAR (name)
295 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
296 || !id->transform_return_to_modify))
298 struct ptr_info_def *pi;
299 new_tree = make_ssa_name (new_tree);
300 insert_decl_map (id, name, new_tree);
301 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
302 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
303 /* At least IPA points-to info can be directly transferred. */
304 if (id->src_cfun->gimple_df
305 && id->src_cfun->gimple_df->ipa_pta
306 && POINTER_TYPE_P (TREE_TYPE (name))
307 && (pi = SSA_NAME_PTR_INFO (name))
308 && !pi->pt.anything)
310 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
311 new_pi->pt = pi->pt;
313 /* So can range-info. */
314 if (!POINTER_TYPE_P (TREE_TYPE (name))
315 && SSA_NAME_RANGE_INFO (name))
316 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
317 SSA_NAME_RANGE_INFO (name));
318 if (SSA_NAME_IS_DEFAULT_DEF (name))
320 /* By inlining a function having an uninitialized variable, we might
321 extend its lifetime (the variable might get reused). This causes an
322 ICE in the case we end up extending the lifetime of an SSA name
323 across an abnormal edge, and also increases register pressure.
325 We simply initialize all uninitialized vars to 0, except for the
326 case where we are inlining into the very first BB. We could avoid
327 this for all BBs that are not inside strongly connected
328 regions of the CFG, but this is expensive to test. */
329 if (id->entry_bb
330 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
331 && (!SSA_NAME_VAR (name)
332 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
333 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
334 0)->dest
335 || EDGE_COUNT (id->entry_bb->preds) != 1))
337 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
338 gimple *init_stmt;
339 tree zero = build_zero_cst (TREE_TYPE (new_tree));
341 init_stmt = gimple_build_assign (new_tree, zero);
342 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
343 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
345 else
347 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
348 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
352 else
353 insert_decl_map (id, name, new_tree);
354 return new_tree;
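/* Sketch of the default-def handling above (hypothetical SSA names):
   when the source body uses an uninitialized u_3(D) and the inline
   point is not the very first BB, the code emits in id->entry_bb

     u.copy_7 = 0;

   and clears SSA_NAME_IS_DEFAULT_DEF, so that the extended lifetime of
   the name across abnormal edges never exposes an undefined value.  */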
357 /* Remap DECL during the copying of the BLOCK tree for the function. */
359 tree
360 remap_decl (tree decl, copy_body_data *id)
362 tree *n;
364 /* We only remap local variables in the current function. */
366 /* See if we have remapped this declaration. */
368 n = id->decl_map->get (decl);
370 if (!n && processing_debug_stmt)
372 processing_debug_stmt = -1;
373 return decl;
376 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
377 necessary DECLs have already been remapped and we do not want to duplicate
378 a decl coming from outside of the sequence we are copying. */
379 if (!n
380 && id->prevent_decl_creation_for_types
381 && id->remapping_type_depth > 0
382 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
383 return decl;
385 /* If we didn't already have an equivalent for this declaration, create one
386 now. */
387 if (!n)
389 /* Make a copy of the variable or label. */
390 tree t = id->copy_decl (decl, id);
392 /* Remember it, so that if we encounter this local entity again
393 we can reuse this copy. Do this early because remap_type may
394 need this decl for TYPE_STUB_DECL. */
395 insert_decl_map (id, decl, t);
397 if (!DECL_P (t))
398 return t;
400 /* Remap types, if necessary. */
401 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
402 if (TREE_CODE (t) == TYPE_DECL)
404 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
406 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
407 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
408 is not set on the TYPE_DECL, for example in LTO mode. */
409 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
411 tree x = build_variant_type_copy (TREE_TYPE (t));
412 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
413 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
414 DECL_ORIGINAL_TYPE (t) = x;
418 /* Remap sizes as necessary. */
419 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
420 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
422 /* If fields, do likewise for offset and qualifier. */
423 if (TREE_CODE (t) == FIELD_DECL)
425 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
426 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
427 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
430 return t;
433 if (id->do_not_unshare)
434 return *n;
435 else
436 return unshare_expr (*n);
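/* Usage sketch: remap_decl is effectively memoized per declaration.
   The first call for a local VAR_DECL invokes id->copy_decl and records
   the pair; later calls return the recorded copy, e.g.

     tree c1 = remap_decl (v, id);   <- creates and records the copy
     tree c2 = remap_decl (v, id);   <- returns the same mapping

   (for bare decls, unshare_expr returns the node itself, so c1 == c2).  */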
439 static tree
440 remap_type_1 (tree type, copy_body_data *id)
442 tree new_tree, t;
444 /* We do need a copy; build and register it now. If this is a pointer or
445 reference type, remap the designated type and make a new pointer or
446 reference type. */
447 if (TREE_CODE (type) == POINTER_TYPE)
449 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
450 TYPE_MODE (type),
451 TYPE_REF_CAN_ALIAS_ALL (type));
452 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
453 new_tree = build_type_attribute_qual_variant (new_tree,
454 TYPE_ATTRIBUTES (type),
455 TYPE_QUALS (type));
456 insert_decl_map (id, type, new_tree);
457 return new_tree;
459 else if (TREE_CODE (type) == REFERENCE_TYPE)
461 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
462 TYPE_MODE (type),
463 TYPE_REF_CAN_ALIAS_ALL (type));
464 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
465 new_tree = build_type_attribute_qual_variant (new_tree,
466 TYPE_ATTRIBUTES (type),
467 TYPE_QUALS (type));
468 insert_decl_map (id, type, new_tree);
469 return new_tree;
471 else
472 new_tree = copy_node (type);
474 insert_decl_map (id, type, new_tree);
476 /* This is a new type, not a copy of an old type. Need to reassociate
477 variants. We can handle everything except the main variant lazily. */
478 t = TYPE_MAIN_VARIANT (type);
479 if (type != t)
481 t = remap_type (t, id);
482 TYPE_MAIN_VARIANT (new_tree) = t;
483 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
484 TYPE_NEXT_VARIANT (t) = new_tree;
486 else
488 TYPE_MAIN_VARIANT (new_tree) = new_tree;
489 TYPE_NEXT_VARIANT (new_tree) = NULL;
492 if (TYPE_STUB_DECL (type))
493 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
495 /* Lazily create pointer and reference types. */
496 TYPE_POINTER_TO (new_tree) = NULL;
497 TYPE_REFERENCE_TO (new_tree) = NULL;
499 /* Copy all types that may contain references to local variables; be sure to
500 preserve sharing between the type and its main variant when possible. */
501 switch (TREE_CODE (new_tree))
503 case INTEGER_TYPE:
504 case REAL_TYPE:
505 case FIXED_POINT_TYPE:
506 case ENUMERAL_TYPE:
507 case BOOLEAN_TYPE:
508 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
510 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
511 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
513 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
514 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
516 else
518 t = TYPE_MIN_VALUE (new_tree);
519 if (t && TREE_CODE (t) != INTEGER_CST)
520 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
522 t = TYPE_MAX_VALUE (new_tree);
523 if (t && TREE_CODE (t) != INTEGER_CST)
524 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
526 return new_tree;
528 case FUNCTION_TYPE:
529 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
530 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
531 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
532 else
533 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
534 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
535 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
536 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
537 else
538 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
539 return new_tree;
541 case ARRAY_TYPE:
542 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
543 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
544 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
545 else
546 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
548 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
550 gcc_checking_assert (TYPE_DOMAIN (type)
551 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
552 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
554 else
556 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
557 /* For array bounds where we have decided not to copy over the bounds
558 variable, which isn't used in the OpenMP/OpenACC region, change them
559 to an uninitialized VAR_DECL temporary. */
560 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
561 && id->adjust_array_error_bounds
562 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
564 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
565 DECL_ATTRIBUTES (v)
566 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
567 DECL_ATTRIBUTES (v));
568 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
571 break;
573 case RECORD_TYPE:
574 case UNION_TYPE:
575 case QUAL_UNION_TYPE:
576 if (TYPE_MAIN_VARIANT (type) != type
577 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
578 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
579 else
581 tree f, nf = NULL;
583 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
585 t = remap_decl (f, id);
586 DECL_CONTEXT (t) = new_tree;
587 DECL_CHAIN (t) = nf;
588 nf = t;
590 TYPE_FIELDS (new_tree) = nreverse (nf);
592 break;
594 case OFFSET_TYPE:
595 default:
596 /* Shouldn't have been thought variable sized. */
597 gcc_unreachable ();
600 /* All variants of a type share the same size, so use the already remapped data. */
601 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
603 tree s = TYPE_SIZE (type);
604 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
605 tree su = TYPE_SIZE_UNIT (type);
606 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
607 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
608 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
609 || s == mvs);
610 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
611 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
612 || su == mvsu);
613 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
614 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
616 else
618 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
619 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
622 return new_tree;
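/* Example (a sketch with hypothetical names): for a variably modified
   type such as

     char buf[n];   <- "n" is a PARM_DECL of the source function

   the ARRAY_TYPE and its INTEGER_TYPE domain are copied, and the
   walk_tree calls over TYPE_MAX_VALUE / TYPE_SIZE rewrite the embedded
   reference to "n" so the copy is sized by the remapped "n" instead.  */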
625 /* Helper function for remap_type_2, called through walk_tree. */
627 static tree
628 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
630 copy_body_data *id = (copy_body_data *) data;
632 if (TYPE_P (*tp))
633 *walk_subtrees = 0;
635 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
636 return *tp;
638 return NULL_TREE;
641 /* Return true if TYPE needs to be remapped because remap_decl on any
642 needed embedded decl returns something other than that decl. */
644 static bool
645 remap_type_2 (tree type, copy_body_data *id)
647 tree t;
649 #define RETURN_TRUE_IF_VAR(T) \
650 do \
652 tree _t = (T); \
653 if (_t) \
655 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
656 return true; \
657 if (!TYPE_SIZES_GIMPLIFIED (type) \
658 && walk_tree (&_t, remap_type_3, id, NULL)) \
659 return true; \
662 while (0)
664 switch (TREE_CODE (type))
666 case POINTER_TYPE:
667 case REFERENCE_TYPE:
668 case FUNCTION_TYPE:
669 case METHOD_TYPE:
670 return remap_type_2 (TREE_TYPE (type), id);
672 case INTEGER_TYPE:
673 case REAL_TYPE:
674 case FIXED_POINT_TYPE:
675 case ENUMERAL_TYPE:
676 case BOOLEAN_TYPE:
677 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
678 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
679 return false;
681 case ARRAY_TYPE:
682 if (remap_type_2 (TREE_TYPE (type), id)
683 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
684 return true;
685 break;
687 case RECORD_TYPE:
688 case UNION_TYPE:
689 case QUAL_UNION_TYPE:
690 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
691 if (TREE_CODE (t) == FIELD_DECL)
693 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
694 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
695 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
696 if (TREE_CODE (type) == QUAL_UNION_TYPE)
697 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
699 break;
701 default:
702 return false;
705 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
706 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
707 return false;
708 #undef RETURN_TRUE_IF_VAR
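/* Example (sketch): for "int a[n]", remap_type_2 returns true as soon
   as remap_decl on the "n" embedded in the domain bounds yields a
   different decl; for a type whose sizes mention only unchanged
   globals it returns false, which lets remap_type keep the original
   type node.  */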
711 tree
712 remap_type (tree type, copy_body_data *id)
714 tree *node;
715 tree tmp;
717 if (type == NULL)
718 return type;
720 /* See if we have remapped this type. */
721 node = id->decl_map->get (type);
722 if (node)
723 return *node;
725 /* The type only needs remapping if it's variably modified. */
726 if (! variably_modified_type_p (type, id->src_fn)
727 /* Don't remap if the copy_decl method doesn't always return a new
728 decl and for all embedded decls returns the passed-in decl. */
729 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
731 insert_decl_map (id, type, type);
732 return type;
735 id->remapping_type_depth++;
736 tmp = remap_type_1 (type, id);
737 id->remapping_type_depth--;
739 return tmp;
742 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
744 static bool
745 can_be_nonlocal (tree decl, copy_body_data *id)
747 /* We cannot duplicate function decls. */
748 if (TREE_CODE (decl) == FUNCTION_DECL)
749 return true;
751 /* Local static vars must be non-local or we get multiple declaration
752 problems. */
753 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
754 return true;
756 return false;
759 static tree
760 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
761 copy_body_data *id)
763 tree old_var;
764 tree new_decls = NULL_TREE;
766 /* Remap its variables. */
767 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
769 tree new_var;
771 if (can_be_nonlocal (old_var, id))
773 /* We need to add this variable to the local decls as otherwise
774 nothing else will do so. */
775 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
776 add_local_decl (cfun, old_var);
777 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
778 && !DECL_IGNORED_P (old_var)
779 && nonlocalized_list)
780 vec_safe_push (*nonlocalized_list, old_var);
781 continue;
784 /* Remap the variable. */
785 new_var = remap_decl (old_var, id);
787 /* If we didn't remap this variable, we can't mess with its
788 TREE_CHAIN. If we remapped this variable to the return slot, it's
789 already declared somewhere else, so don't declare it here. */
791 if (new_var == id->retvar)
793 else if (!new_var)
795 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
796 && !DECL_IGNORED_P (old_var)
797 && nonlocalized_list)
798 vec_safe_push (*nonlocalized_list, old_var);
800 else
802 gcc_assert (DECL_P (new_var));
803 DECL_CHAIN (new_var) = new_decls;
804 new_decls = new_var;
806 /* Also copy value-expressions. */
807 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
809 tree tem = DECL_VALUE_EXPR (new_var);
810 bool old_regimplify = id->regimplify;
811 id->remapping_type_depth++;
812 walk_tree (&tem, copy_tree_body_r, id, NULL);
813 id->remapping_type_depth--;
814 id->regimplify = old_regimplify;
815 SET_DECL_VALUE_EXPR (new_var, tem);
820 return nreverse (new_decls);
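/* Note on ordering: the loop above pushes each remapped decl onto the
   front of NEW_DECLS, so the chain is built in reverse; e.g. remapping
   the list [a, b, c] yields c -> b -> a, and the final nreverse
   restores a -> b -> c.  */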
823 /* Copy the BLOCK to contain remapped versions of the variables
824 therein. And hook the new block into the block-tree. */
826 static void
827 remap_block (tree *block, copy_body_data *id)
829 tree old_block;
830 tree new_block;
832 /* Make the new block. */
833 old_block = *block;
834 new_block = make_node (BLOCK);
835 TREE_USED (new_block) = TREE_USED (old_block);
836 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
837 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
838 BLOCK_NONLOCALIZED_VARS (new_block)
839 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
840 *block = new_block;
842 /* Remap its variables. */
843 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
844 &BLOCK_NONLOCALIZED_VARS (new_block),
845 id);
847 if (id->transform_lang_insert_block)
848 id->transform_lang_insert_block (new_block);
850 /* Remember the remapped block. */
851 insert_decl_map (id, old_block, new_block);
854 /* Copy the whole block tree and root it in id->block. */
856 static tree
857 remap_blocks (tree block, copy_body_data *id)
859 tree t;
860 tree new_tree = block;
862 if (!block)
863 return NULL;
865 remap_block (&new_tree, id);
866 gcc_assert (new_tree != block);
867 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
868 prepend_lexical_block (new_tree, remap_blocks (t, id));
869 /* Blocks are in arbitrary order, but make things slightly prettier and do
870 not swap order when producing a copy. */
871 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
872 return new_tree;
875 /* Remap the block tree rooted at BLOCK to nothing. */
877 static void
878 remap_blocks_to_null (tree block, copy_body_data *id)
880 tree t;
881 insert_decl_map (id, block, NULL_TREE);
882 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
883 remap_blocks_to_null (t, id);
886 /* Remap the location info pointed to by LOCUS. */
888 static location_t
889 remap_location (location_t locus, copy_body_data *id)
891 if (LOCATION_BLOCK (locus))
893 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
894 gcc_assert (n);
895 if (*n)
896 return set_block (locus, *n);
899 locus = LOCATION_LOCUS (locus);
901 if (locus != UNKNOWN_LOCATION && id->block)
902 return set_block (locus, id->block);
904 return locus;
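/* Example (sketch): a location whose LOCATION_BLOCK is a block of the
   source function is rebound to that block's remapped copy; a blockless
   location (other than UNKNOWN_LOCATION) is wrapped into id->block, so
   inlined statements appear inside the block representing the call.  */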
907 static void
908 copy_statement_list (tree *tp)
910 tree_stmt_iterator oi, ni;
911 tree new_tree;
913 new_tree = alloc_stmt_list ();
914 ni = tsi_start (new_tree);
915 oi = tsi_start (*tp);
916 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
917 *tp = new_tree;
919 for (; !tsi_end_p (oi); tsi_next (&oi))
921 tree stmt = tsi_stmt (oi);
922 if (TREE_CODE (stmt) == STATEMENT_LIST)
923 /* This copy is not redundant; tsi_link_after will smash this
924 STATEMENT_LIST into the end of the one we're building, and we
925 don't want to do that with the original. */
926 copy_statement_list (&stmt);
927 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
931 static void
932 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
934 tree block = BIND_EXPR_BLOCK (*tp);
935 /* Copy (and replace) the statement. */
936 copy_tree_r (tp, walk_subtrees, NULL);
937 if (block)
939 remap_block (&block, id);
940 BIND_EXPR_BLOCK (*tp) = block;
943 if (BIND_EXPR_VARS (*tp))
944 /* This will remap a lot of the same decls again, but this should be
945 harmless. */
946 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
950 /* Create a new gimple_seq by remapping all the statements in BODY
951 using the inlining information in ID. */
953 static gimple_seq
954 remap_gimple_seq (gimple_seq body, copy_body_data *id)
956 gimple_stmt_iterator si;
957 gimple_seq new_body = NULL;
959 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
961 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
962 gimple_seq_add_seq (&new_body, new_stmts);
965 return new_body;
969 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
970 block using the mapping information in ID. */
972 static gimple *
973 copy_gimple_bind (gbind *stmt, copy_body_data *id)
975 gimple *new_bind;
976 tree new_block, new_vars;
977 gimple_seq body, new_body;
979 /* Copy the statement. Note that we purposely don't use copy_stmt
980 here because we need to remap statements as we copy. */
981 body = gimple_bind_body (stmt);
982 new_body = remap_gimple_seq (body, id);
984 new_block = gimple_bind_block (stmt);
985 if (new_block)
986 remap_block (&new_block, id);
988 /* This will remap a lot of the same decls again, but this should be
989 harmless. */
990 new_vars = gimple_bind_vars (stmt);
991 if (new_vars)
992 new_vars = remap_decls (new_vars, NULL, id);
994 new_bind = gimple_build_bind (new_vars, new_body, new_block);
996 return new_bind;
999 /* Return true if DECL is a parameter or an SSA_NAME for a parameter. */
1001 static bool
1002 is_parm (tree decl)
1004 if (TREE_CODE (decl) == SSA_NAME)
1006 decl = SSA_NAME_VAR (decl);
1007 if (!decl)
1008 return false;
1011 return (TREE_CODE (decl) == PARM_DECL);
1014 /* Remap the dependence CLIQUE from the source to the destination function
1015 as specified in ID. */
1017 static unsigned short
1018 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1020 if (clique == 0 || processing_debug_stmt)
1021 return 0;
1022 if (!id->dependence_map)
1023 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1024 bool existed;
1025 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1026 if (!existed)
1028 /* Clique 1 is reserved for local ones set by PTA. */
1029 if (cfun->last_clique == 0)
1030 cfun->last_clique = 1;
1031 newc = ++cfun->last_clique;
1033 return newc;
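/* Example (sketch): the first source clique encountered, say clique 2,
   is assigned cfun->last_clique + 1 in the destination; looking up the
   same source clique again returns the same new number.  Clique 1 is
   never handed out here, since it is reserved for local cliques
   assigned by PTA.  */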
1036 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1037 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1038 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
1039 recursing into the children nodes of *TP. */
1041 static tree
1042 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1044 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1045 copy_body_data *id = (copy_body_data *) wi_p->info;
1046 tree fn = id->src_fn;
1048 /* For recursive invocations this is no longer the LHS itself. */
1049 bool is_lhs = wi_p->is_lhs;
1050 wi_p->is_lhs = false;
1052 if (TREE_CODE (*tp) == SSA_NAME)
1054 *tp = remap_ssa_name (*tp, id);
1055 *walk_subtrees = 0;
1056 if (is_lhs)
1057 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1058 return NULL;
1060 else if (auto_var_in_fn_p (*tp, fn))
1062 /* Local variables and labels need to be replaced by equivalent
1063 variables. We don't want to copy static variables; there's
1064 only one of those, no matter how many times we inline the
1065 containing function. Similarly for globals from an outer
1066 function. */
1067 tree new_decl;
1069 /* Remap the declaration. */
1070 new_decl = remap_decl (*tp, id);
1071 gcc_assert (new_decl);
1072 /* Replace this variable with the copy. */
1073 STRIP_TYPE_NOPS (new_decl);
1074 /* ??? The C++ frontend uses void * pointer zero to initialize
1075 any other type. This confuses the middle-end type verification.
1076 As cloned bodies do not go through gimplification again, the fixup
1077 there doesn't trigger. */
1078 if (TREE_CODE (new_decl) == INTEGER_CST
1079 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1080 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1081 *tp = new_decl;
1082 *walk_subtrees = 0;
1084 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1085 gcc_unreachable ();
1086 else if (TREE_CODE (*tp) == SAVE_EXPR)
1087 gcc_unreachable ();
1088 else if (TREE_CODE (*tp) == LABEL_DECL
1089 && (!DECL_CONTEXT (*tp)
1090 || decl_function_context (*tp) == id->src_fn))
1091 /* These may need to be remapped for EH handling. */
1092 *tp = remap_decl (*tp, id);
1093 else if (TREE_CODE (*tp) == FIELD_DECL)
1095 /* If the enclosing record type is variably_modified_type_p, the field
1096 has already been remapped. Otherwise, it need not be. */
1097 tree *n = id->decl_map->get (*tp);
1098 if (n)
1099 *tp = *n;
1100 *walk_subtrees = 0;
1102 else if (TYPE_P (*tp))
1103 /* Types may need remapping as well. */
1104 *tp = remap_type (*tp, id);
1105 else if (CONSTANT_CLASS_P (*tp))
1107 /* If this is a constant, we have to copy the node iff the type
1108 will be remapped. copy_tree_r will not copy a constant. */
1109 tree new_type = remap_type (TREE_TYPE (*tp), id);
1111 if (new_type == TREE_TYPE (*tp))
1112 *walk_subtrees = 0;
1114 else if (TREE_CODE (*tp) == INTEGER_CST)
1115 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1116 else
1118 *tp = copy_node (*tp);
1119 TREE_TYPE (*tp) = new_type;
1122 else
1124 /* Otherwise, just copy the node. Note that copy_tree_r already
1125 knows not to copy VAR_DECLs, etc., so this is safe. */
1127 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1129 /* We need to re-canonicalize MEM_REFs from inline substitutions
1130 that can happen when a pointer argument is an ADDR_EXPR.
1131 Recurse here manually to allow that. */
1132 tree ptr = TREE_OPERAND (*tp, 0);
1133 tree type = remap_type (TREE_TYPE (*tp), id);
1134 tree old = *tp;
1135 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1136 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1137 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1138 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1139 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1140 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1142 MR_DEPENDENCE_CLIQUE (*tp)
1143 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1144 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1146 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1147 remapped a parameter as the property might be valid only
1148 for the parameter itself. */
1149 if (TREE_THIS_NOTRAP (old)
1150 && (!is_parm (TREE_OPERAND (old, 0))
1151 || (!id->transform_parameter && is_parm (ptr))))
1152 TREE_THIS_NOTRAP (*tp) = 1;
1153 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1154 *walk_subtrees = 0;
1155 return NULL;
1158 /* Here is the "usual case". Copy this tree node, and then
1159 tweak some special cases. */
1160 copy_tree_r (tp, walk_subtrees, NULL);
1162 if (TREE_CODE (*tp) != OMP_CLAUSE)
1163 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1165 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1167 /* The copied TARGET_EXPR has never been expanded, even if the
1168 original node was expanded already. */
1169 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1170 TREE_OPERAND (*tp, 3) = NULL_TREE;
1172 else if (TREE_CODE (*tp) == ADDR_EXPR)
1174 /* Variable substitution need not be simple. In particular,
1175 consider the MEM_REF substitution above. Make sure that
1176 TREE_CONSTANT and friends are up to date. */
1177 int invariant = is_gimple_min_invariant (*tp);
1178 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1179 recompute_tree_invariant_for_addr_expr (*tp);
1181 /* If this used to be invariant, but is not any longer,
1182 then regimplification is probably needed. */
1183 if (invariant && !is_gimple_min_invariant (*tp))
1184 id->regimplify = true;
1186 *walk_subtrees = 0;
1190 /* Update the TREE_BLOCK for the cloned expr. */
1191 if (EXPR_P (*tp))
1193 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1194 tree old_block = TREE_BLOCK (*tp);
1195 if (old_block)
1197 tree *n;
1198 n = id->decl_map->get (TREE_BLOCK (*tp));
1199 if (n)
1200 new_block = *n;
1202 TREE_SET_BLOCK (*tp, new_block);
1205 /* Keep iterating. */
1206 return NULL_TREE;
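/* Example (a sketch with hypothetical names) of the MEM_REF
   re-canonicalization above: if parameter "p" was bound to "&x" at the
   call site, a source access

     MEM[(int *)p]

   is rebuilt by fold_build2 as MEM[(int *)&x], which folds to a direct
   access of "x"; volatility, side effects and dependence cliques are
   carried over from the old reference.  */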
1210 /* Called from copy_body via walk_tree. DATA is really a
1211 `copy_body_data *'. */
1213 tree
1214 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1216 copy_body_data *id = (copy_body_data *) data;
1217 tree fn = id->src_fn;
1218 tree new_block;
1220 /* Begin by recognizing trees that we'll completely rewrite for the
1221 inlining context. Our output for these trees is completely
1222 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1223 into an edge). Further down, we'll handle trees that get
1224 duplicated and/or tweaked. */
1226 /* When requested, RETURN_EXPRs should be transformed to just the
1227 contained MODIFY_EXPR. The branch semantics of the return will
1228 be handled elsewhere by manipulating the CFG rather than a statement. */
1229 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1231 tree assignment = TREE_OPERAND (*tp, 0);
1233 /* If we're returning something, just turn that into an
1234 assignment into the equivalent of the original RESULT_DECL.
1235 If the "assignment" is just the result decl, the result
1236 decl has already been set (e.g. a recent "foo (&result_decl,
1237 ...)"); just toss the entire RETURN_EXPR. */
1238 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1240 /* Replace the RETURN_EXPR with (a copy of) the
1241 MODIFY_EXPR hanging underneath. */
1242 *tp = copy_node (assignment);
1244 else /* Else the RETURN_EXPR returns no value. */
1246 *tp = NULL;
1247 return (tree) (void *)1;
1250 else if (TREE_CODE (*tp) == SSA_NAME)
1252 *tp = remap_ssa_name (*tp, id);
1253 *walk_subtrees = 0;
1254 return NULL;
1257 /* Local variables and labels need to be replaced by equivalent
1258 variables. We don't want to copy static variables; there's only
1259 one of those, no matter how many times we inline the containing
1260 function. Similarly for globals from an outer function. */
1261 else if (auto_var_in_fn_p (*tp, fn))
1263 tree new_decl;
1265 /* Remap the declaration. */
1266 new_decl = remap_decl (*tp, id);
1267 gcc_assert (new_decl);
1268 /* Replace this variable with the copy. */
1269 STRIP_TYPE_NOPS (new_decl);
1270 *tp = new_decl;
1271 *walk_subtrees = 0;
1273 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1274 copy_statement_list (tp);
1275 else if (TREE_CODE (*tp) == SAVE_EXPR
1276 || TREE_CODE (*tp) == TARGET_EXPR)
1277 remap_save_expr (tp, id->decl_map, walk_subtrees);
1278 else if (TREE_CODE (*tp) == LABEL_DECL
1279 && (! DECL_CONTEXT (*tp)
1280 || decl_function_context (*tp) == id->src_fn))
1281 /* These may need to be remapped for EH handling. */
1282 *tp = remap_decl (*tp, id);
1283 else if (TREE_CODE (*tp) == BIND_EXPR)
1284 copy_bind_expr (tp, walk_subtrees, id);
1285 /* Types may need remapping as well. */
1286 else if (TYPE_P (*tp))
1287 *tp = remap_type (*tp, id);
1289 /* If this is a constant, we have to copy the node iff the type will be
1290 remapped. copy_tree_r will not copy a constant. */
1291 else if (CONSTANT_CLASS_P (*tp))
1293 tree new_type = remap_type (TREE_TYPE (*tp), id);
1295 if (new_type == TREE_TYPE (*tp))
1296 *walk_subtrees = 0;
1298 else if (TREE_CODE (*tp) == INTEGER_CST)
1299 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1300 else
1302 *tp = copy_node (*tp);
1303 TREE_TYPE (*tp) = new_type;
1307 /* Otherwise, just copy the node. Note that copy_tree_r already
1308 knows not to copy VAR_DECLs, etc., so this is safe. */
1309 else
1311 /* Here we handle trees that are not completely rewritten.
1312 First we detect some inlining-induced bogosities for
1313 discarding. */
1314 if (TREE_CODE (*tp) == MODIFY_EXPR
1315 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1316 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1318 /* Some assignments VAR = VAR; don't generate any rtl code
1319 and thus don't count as variable modification. Avoid
1320 keeping bogosities like 0 = 0. */
1321 tree decl = TREE_OPERAND (*tp, 0), value;
1322 tree *n;
1324 n = id->decl_map->get (decl);
1325 if (n)
1327 value = *n;
1328 STRIP_TYPE_NOPS (value);
1329 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1331 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1332 return copy_tree_body_r (tp, walk_subtrees, data);
1336 else if (TREE_CODE (*tp) == INDIRECT_REF)
1338 /* Get rid of *& from inline substitutions that can happen when a
1339 pointer argument is an ADDR_EXPR. */
1340 tree decl = TREE_OPERAND (*tp, 0);
1341 tree *n = id->decl_map->get (decl);
1342 if (n)
1344 /* If we happen to get an ADDR_EXPR in n->value, strip
1345 it manually here as we'll eventually get ADDR_EXPRs
1346 which lie about their types pointed to. In this case
1347 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1348 but we absolutely rely on that. As fold_indirect_ref
1349 does other useful transformations, try that first, though. */
1350 tree type = TREE_TYPE (*tp);
1351 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1352 tree old = *tp;
1353 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1354 if (! *tp)
1356 type = remap_type (type, id);
1357 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1359 *tp
1360 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1361 /* ??? We should either assert here or build
1362 a VIEW_CONVERT_EXPR instead of blindly leaking
1363 incompatible types to our IL. */
1364 if (! *tp)
1365 *tp = TREE_OPERAND (ptr, 0);
1367 else
1369 *tp = build1 (INDIRECT_REF, type, ptr);
1370 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1371 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1372 TREE_READONLY (*tp) = TREE_READONLY (old);
1373 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1374 have remapped a parameter as the property might be
1375 valid only for the parameter itself. */
1376 if (TREE_THIS_NOTRAP (old)
1377 && (!is_parm (TREE_OPERAND (old, 0))
1378 || (!id->transform_parameter && is_parm (ptr))))
1379 TREE_THIS_NOTRAP (*tp) = 1;
1382 *walk_subtrees = 0;
1383 return NULL;
1386 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1388 /* We need to re-canonicalize MEM_REFs from inline substitutions
1389 that can happen when a pointer argument is an ADDR_EXPR.
1390 Recurse here manually to allow that. */
1391 tree ptr = TREE_OPERAND (*tp, 0);
1392 tree type = remap_type (TREE_TYPE (*tp), id);
1393 tree old = *tp;
1394 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1395 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1396 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1397 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1398 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1399 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1401 MR_DEPENDENCE_CLIQUE (*tp)
1402 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1403 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1405 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1406 remapped a parameter as the property might be valid only
1407 for the parameter itself. */
1408 if (TREE_THIS_NOTRAP (old)
1409 && (!is_parm (TREE_OPERAND (old, 0))
1410 || (!id->transform_parameter && is_parm (ptr))))
1411 TREE_THIS_NOTRAP (*tp) = 1;
1412 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1413 *walk_subtrees = 0;
1414 return NULL;
1417 /* Here is the "usual case". Copy this tree node, and then
1418 tweak some special cases. */
1419 copy_tree_r (tp, walk_subtrees, NULL);
1421 /* If EXPR has block defined, map it to newly constructed block.
1422 When inlining we want EXPRs without block appear in the block
1423 of function call if we are not remapping a type. */
1424 if (EXPR_P (*tp))
1426 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1427 if (TREE_BLOCK (*tp))
1429 tree *n;
1430 n = id->decl_map->get (TREE_BLOCK (*tp));
1431 if (n)
1432 new_block = *n;
1434 TREE_SET_BLOCK (*tp, new_block);
1437 if (TREE_CODE (*tp) != OMP_CLAUSE)
1438 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1440 /* The copied TARGET_EXPR has never been expanded, even if the
1441 original node was expanded already. */
1442 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1444 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1445 TREE_OPERAND (*tp, 3) = NULL_TREE;
1448 /* Variable substitution need not be simple. In particular, consider
1449 the INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1450 and friends are up to date. */
1451 else if (TREE_CODE (*tp) == ADDR_EXPR)
1453 int invariant = is_gimple_min_invariant (*tp);
1454 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1456 /* Handle the case where we substituted an INDIRECT_REF
1457 into the operand of the ADDR_EXPR. */
1458 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1459 && !id->do_not_fold)
1461 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1462 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1463 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1464 *tp = t;
1466 else
1467 recompute_tree_invariant_for_addr_expr (*tp);
1469 /* If this used to be invariant, but is not any longer,
1470 then regimplification is probably needed. */
1471 if (invariant && !is_gimple_min_invariant (*tp))
1472 id->regimplify = true;
1474 *walk_subtrees = 0;
1478 /* Keep iterating. */
1479 return NULL_TREE;
1482 /* Helper for remap_gimple_stmt. Given an EH region number for the
1483 source function, map that to the duplicate EH region number in
1484 the destination function. */
1486 static int
1487 remap_eh_region_nr (int old_nr, copy_body_data *id)
1489 eh_region old_r, new_r;
1491 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1492 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1494 return new_r->index;
1497 /* Similar, but operate on INTEGER_CSTs. */
1499 static tree
1500 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1502 int old_nr, new_nr;
1504 old_nr = tree_to_shwi (old_t_nr);
1505 new_nr = remap_eh_region_nr (old_nr, id);
1507 return build_int_cst (integer_type_node, new_nr);
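/* Usage sketch (hypothetical region numbers): a source statement

     __builtin_eh_pointer (3)

   is rewritten so its argument becomes the INTEGER_CST for the
   duplicated region, i.e. remap_eh_region_nr (3, id), keeping the call
   consistent with the EH tree copied into the destination function.  */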
1510 /* Helper for copy_bb. Remap statement STMT using the inlining
1511 information in ID. Return the new statement copy. */
1513 static gimple_seq
1514 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1516 gimple *copy = NULL;
1517 struct walk_stmt_info wi;
1518 bool skip_first = false;
1519 gimple_seq stmts = NULL;
1521 if (is_gimple_debug (stmt)
1522 && (gimple_debug_nonbind_marker_p (stmt)
1523 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1524 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1525 return NULL;
1527 /* Begin by recognizing trees that we'll completely rewrite for the
1528 inlining context. Our output for these trees is completely
1529 different from our input (e.g. RETURN_EXPR is deleted and morphs
1530 into an edge). Further down, we'll handle trees that get
1531 duplicated and/or tweaked. */
1533 /* When requested, GIMPLE_RETURN should be transformed to just the
1534 contained GIMPLE_ASSIGN. The branch semantics of the return will
1535 be handled elsewhere by manipulating the CFG rather than the
1536 statement. */
1537 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1539 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1541 /* If we're returning something, just turn that into an
1542 assignment to the equivalent of the original RESULT_DECL.
1543 If RETVAL is just the result decl, the result decl has
1544 already been set (e.g. a recent "foo (&result_decl, ...)");
1545 just toss the entire GIMPLE_RETURN. */
1546 if (retval
1547 && (TREE_CODE (retval) != RESULT_DECL
1548 && (TREE_CODE (retval) != SSA_NAME
1549 || ! SSA_NAME_VAR (retval)
1550 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1552 copy = gimple_build_assign (id->do_not_unshare
1553 ? id->retvar : unshare_expr (id->retvar),
1554 retval);
1555 /* id->retvar is already substituted. Skip it on later remapping. */
1556 skip_first = true;
1558 else
1559 return NULL;
1561 else if (gimple_has_substatements (stmt))
1563 gimple_seq s1, s2;
1565 /* When cloning bodies from the C++ front end, we will be handed bodies
1566 in High GIMPLE form. Handle here all the High GIMPLE statements that
1567 have embedded statements. */
1568 switch (gimple_code (stmt))
1570 case GIMPLE_BIND:
1571 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1572 break;
1574 case GIMPLE_CATCH:
1576 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1577 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1578 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1580 break;
1582 case GIMPLE_EH_FILTER:
1583 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1584 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1585 break;
1587 case GIMPLE_TRY:
1588 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1589 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1590 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1591 break;
1593 case GIMPLE_WITH_CLEANUP_EXPR:
1594 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1595 copy = gimple_build_wce (s1);
1596 break;
1598 case GIMPLE_OMP_PARALLEL:
1600 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1601 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1602 copy = gimple_build_omp_parallel
1603 (s1,
1604 gimple_omp_parallel_clauses (omp_par_stmt),
1605 gimple_omp_parallel_child_fn (omp_par_stmt),
1606 gimple_omp_parallel_data_arg (omp_par_stmt));
1608 break;
1610 case GIMPLE_OMP_TASK:
1611 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1612 copy = gimple_build_omp_task
1613 (s1,
1614 gimple_omp_task_clauses (stmt),
1615 gimple_omp_task_child_fn (stmt),
1616 gimple_omp_task_data_arg (stmt),
1617 gimple_omp_task_copy_fn (stmt),
1618 gimple_omp_task_arg_size (stmt),
1619 gimple_omp_task_arg_align (stmt));
1620 break;
1622 case GIMPLE_OMP_FOR:
1623 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1624 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1625 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1626 gimple_omp_for_clauses (stmt),
1627 gimple_omp_for_collapse (stmt), s2);
1629 size_t i;
1630 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1632 gimple_omp_for_set_index (copy, i,
1633 gimple_omp_for_index (stmt, i));
1634 gimple_omp_for_set_initial (copy, i,
1635 gimple_omp_for_initial (stmt, i));
1636 gimple_omp_for_set_final (copy, i,
1637 gimple_omp_for_final (stmt, i));
1638 gimple_omp_for_set_incr (copy, i,
1639 gimple_omp_for_incr (stmt, i));
1640 gimple_omp_for_set_cond (copy, i,
1641 gimple_omp_for_cond (stmt, i));
1644 break;
1646 case GIMPLE_OMP_MASTER:
1647 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1648 copy = gimple_build_omp_master (s1);
1649 break;
1651 case GIMPLE_OMP_TASKGROUP:
1652 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1653 copy = gimple_build_omp_taskgroup
1654 (s1, gimple_omp_taskgroup_clauses (stmt));
1655 break;
1657 case GIMPLE_OMP_ORDERED:
1658 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1659 copy = gimple_build_omp_ordered
1660 (s1,
1661 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1662 break;
1664 case GIMPLE_OMP_SCAN:
1665 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1666 copy = gimple_build_omp_scan
1667 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1668 break;
1670 case GIMPLE_OMP_SECTION:
1671 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1672 copy = gimple_build_omp_section (s1);
1673 break;
1675 case GIMPLE_OMP_SECTIONS:
1676 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1677 copy = gimple_build_omp_sections
1678 (s1, gimple_omp_sections_clauses (stmt));
1679 break;
1681 case GIMPLE_OMP_SINGLE:
1682 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1683 copy = gimple_build_omp_single
1684 (s1, gimple_omp_single_clauses (stmt));
1685 break;
1687 case GIMPLE_OMP_TARGET:
1688 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1689 copy = gimple_build_omp_target
1690 (s1, gimple_omp_target_kind (stmt),
1691 gimple_omp_target_clauses (stmt));
1692 break;
1694 case GIMPLE_OMP_TEAMS:
1695 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1696 copy = gimple_build_omp_teams
1697 (s1, gimple_omp_teams_clauses (stmt));
1698 break;
1700 case GIMPLE_OMP_CRITICAL:
1701 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1702 copy = gimple_build_omp_critical (s1,
1703 gimple_omp_critical_name
1704 (as_a <gomp_critical *> (stmt)),
1705 gimple_omp_critical_clauses
1706 (as_a <gomp_critical *> (stmt)));
1707 break;
1709 case GIMPLE_TRANSACTION:
1711 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1712 gtransaction *new_trans_stmt;
1713 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1714 id);
1715 copy = new_trans_stmt = gimple_build_transaction (s1);
1716 gimple_transaction_set_subcode (new_trans_stmt,
1717 gimple_transaction_subcode (old_trans_stmt));
1718 gimple_transaction_set_label_norm (new_trans_stmt,
1719 gimple_transaction_label_norm (old_trans_stmt));
1720 gimple_transaction_set_label_uninst (new_trans_stmt,
1721 gimple_transaction_label_uninst (old_trans_stmt));
1722 gimple_transaction_set_label_over (new_trans_stmt,
1723 gimple_transaction_label_over (old_trans_stmt));
1725 break;
1727 default:
1728 gcc_unreachable ();
1731 else
1733 if (gimple_assign_copy_p (stmt)
1734 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1735 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1737 /* Here we handle statements that are not completely rewritten.
1738 First we detect some inlining-induced bogosities for
1739 discarding. */
1741 /* Some assignments VAR = VAR; don't generate any rtl code
1742 and thus don't count as variable modification. Avoid
1743 keeping bogosities like 0 = 0. */
1744 tree decl = gimple_assign_lhs (stmt), value;
1745 tree *n;
1747 n = id->decl_map->get (decl);
1748 if (n)
1750 value = *n;
1751 STRIP_TYPE_NOPS (value);
1752 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1753 return NULL;
1757 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1758 in a block that we aren't copying during tree_function_versioning,
1759 just drop the clobber stmt. */
1760 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1762 tree lhs = gimple_assign_lhs (stmt);
1763 if (TREE_CODE (lhs) == MEM_REF
1764 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1766 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1767 if (gimple_bb (def_stmt)
1768 && !bitmap_bit_p (id->blocks_to_copy,
1769 gimple_bb (def_stmt)->index))
1770 return NULL;
1774 /* We do not allow CLOBBERs of handled components. In case the
1775 returned value is stored via such a handled component, remove
1776 the clobber so the stmt verifier is happy. */
1777 if (gimple_clobber_p (stmt)
1778 && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1780 tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1781 if (!DECL_P (remapped)
1782 && TREE_CODE (remapped) != MEM_REF)
1783 return NULL;
1786 if (gimple_debug_bind_p (stmt))
1788 gdebug *copy
1789 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1790 gimple_debug_bind_get_value (stmt),
1791 stmt);
1792 if (id->reset_location)
1793 gimple_set_location (copy, input_location);
1794 id->debug_stmts.safe_push (copy);
1795 gimple_seq_add_stmt (&stmts, copy);
1796 return stmts;
1798 if (gimple_debug_source_bind_p (stmt))
1800 gdebug *copy = gimple_build_debug_source_bind
1801 (gimple_debug_source_bind_get_var (stmt),
1802 gimple_debug_source_bind_get_value (stmt),
1803 stmt);
1804 if (id->reset_location)
1805 gimple_set_location (copy, input_location);
1806 id->debug_stmts.safe_push (copy);
1807 gimple_seq_add_stmt (&stmts, copy);
1808 return stmts;
1810 if (gimple_debug_nonbind_marker_p (stmt))
1812 /* If the inlined function has too many debug markers,
1813 don't copy them. */
1814 if (id->src_cfun->debug_marker_count
1815 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1816 return stmts;
1818 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1819 if (id->reset_location)
1820 gimple_set_location (copy, input_location);
1821 id->debug_stmts.safe_push (copy);
1822 gimple_seq_add_stmt (&stmts, copy);
1823 return stmts;
1826 /* Create a new deep copy of the statement. */
1827 copy = gimple_copy (stmt);
1829 /* Clear flags that need revisiting. */
1830 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1832 if (gimple_call_tail_p (call_stmt))
1833 gimple_call_set_tail (call_stmt, false);
1834 if (gimple_call_from_thunk_p (call_stmt))
1835 gimple_call_set_from_thunk (call_stmt, false);
1836 if (gimple_call_internal_p (call_stmt))
1837 switch (gimple_call_internal_fn (call_stmt))
1839 case IFN_GOMP_SIMD_LANE:
1840 case IFN_GOMP_SIMD_VF:
1841 case IFN_GOMP_SIMD_LAST_LANE:
1842 case IFN_GOMP_SIMD_ORDERED_START:
1843 case IFN_GOMP_SIMD_ORDERED_END:
1844 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1845 break;
1846 default:
1847 break;
1851 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1852 RESX and EH_DISPATCH. */
1853 if (id->eh_map)
1854 switch (gimple_code (copy))
1856 case GIMPLE_CALL:
1858 tree r, fndecl = gimple_call_fndecl (copy);
1859 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1860 switch (DECL_FUNCTION_CODE (fndecl))
1862 case BUILT_IN_EH_COPY_VALUES:
1863 r = gimple_call_arg (copy, 1);
1864 r = remap_eh_region_tree_nr (r, id);
1865 gimple_call_set_arg (copy, 1, r);
1866 /* FALLTHRU */
1868 case BUILT_IN_EH_POINTER:
1869 case BUILT_IN_EH_FILTER:
1870 r = gimple_call_arg (copy, 0);
1871 r = remap_eh_region_tree_nr (r, id);
1872 gimple_call_set_arg (copy, 0, r);
1873 break;
1875 default:
1876 break;
1879 /* Reset alias info if we didn't apply measures to
1880 keep it valid over inlining by setting DECL_PT_UID. */
1881 if (!id->src_cfun->gimple_df
1882 || !id->src_cfun->gimple_df->ipa_pta)
1883 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1885 break;
1887 case GIMPLE_RESX:
1889 gresx *resx_stmt = as_a <gresx *> (copy);
1890 int r = gimple_resx_region (resx_stmt);
1891 r = remap_eh_region_nr (r, id);
1892 gimple_resx_set_region (resx_stmt, r);
1894 break;
1896 case GIMPLE_EH_DISPATCH:
1898 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1899 int r = gimple_eh_dispatch_region (eh_dispatch);
1900 r = remap_eh_region_nr (r, id);
1901 gimple_eh_dispatch_set_region (eh_dispatch, r);
1903 break;
1905 default:
1906 break;
1910 /* If STMT has a block defined, map it to the newly constructed block. */
1911 if (tree block = gimple_block (copy))
1913 tree *n;
1914 n = id->decl_map->get (block);
1915 gcc_assert (n);
1916 gimple_set_block (copy, *n);
1918 if (id->param_body_adjs)
1920 gimple_seq extra_stmts = NULL;
1921 id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts);
1922 if (!gimple_seq_empty_p (extra_stmts))
1924 memset (&wi, 0, sizeof (wi));
1925 wi.info = id;
1926 for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1927 !gsi_end_p (egsi);
1928 gsi_next (&egsi))
1929 walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1930 gimple_seq_add_seq (&stmts, extra_stmts);
1934 if (id->reset_location)
1935 gimple_set_location (copy, input_location);
1937 /* Debug statements ought to be rebuilt and not copied. */
1938 gcc_checking_assert (!is_gimple_debug (copy));
1940 /* Remap all the operands in COPY. */
1941 memset (&wi, 0, sizeof (wi));
1942 wi.info = id;
1943 if (skip_first)
1944 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1945 else
1946 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1948 /* Clear the copied virtual operands. We are not remapping them here
1949 but are going to recreate them from scratch. */
1950 if (gimple_has_mem_ops (copy))
1952 gimple_set_vdef (copy, NULL_TREE);
1953 gimple_set_vuse (copy, NULL_TREE);
1956 gimple_seq_add_stmt (&stmts, copy);
1957 return stmts;
1961 /* Copy a basic block and scale its profile accordingly. Edges will be
1962 taken care of later. */
1964 static basic_block
1965 copy_bb (copy_body_data *id, basic_block bb,
1966 profile_count num, profile_count den)
1968 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1969 basic_block copy_basic_block;
1970 tree decl;
1971 basic_block prev;
1973 profile_count::adjust_for_ipa_scaling (&num, &den);
1975 /* Search for the previous copied basic block. */
1976 prev = bb->prev_bb;
1977 while (!prev->aux)
1978 prev = prev->prev_bb;
1980 /* create_basic_block() will append every new block to
1981 basic_block_info automatically. */
1982 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1983 copy_basic_block->count = bb->count.apply_scale (num, den);
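/* A worked example of the scaling above (illustrative numbers only):
   if the callee was entered 1000 times (den), the inlined call site
   executes 250 times (num), and BB ran 400 times in the callee's
   profile, the copy gets count 400 * 250 / 1000 = 100.  */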
1985 copy_gsi = gsi_start_bb (copy_basic_block);
1987 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1989 gimple_seq stmts;
1990 gimple *stmt = gsi_stmt (gsi);
1991 gimple *orig_stmt = stmt;
1992 gimple_stmt_iterator stmts_gsi;
1993 bool stmt_added = false;
1995 id->regimplify = false;
1996 stmts = remap_gimple_stmt (stmt, id);
1998 if (gimple_seq_empty_p (stmts))
1999 continue;
2001 seq_gsi = copy_gsi;
2003 for (stmts_gsi = gsi_start (stmts);
2004 !gsi_end_p (stmts_gsi); )
2006 stmt = gsi_stmt (stmts_gsi);
2008 /* Advance iterator now before stmt is moved to seq_gsi. */
2009 gsi_next (&stmts_gsi);
2011 if (gimple_nop_p (stmt))
2012 continue;
2014 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2015 orig_stmt);
2017 /* With return slot optimization we can end up with
2018 non-gimple (foo *)&this->m; fix that here. */
2019 if (is_gimple_assign (stmt)
2020 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
2021 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2023 tree new_rhs;
2024 new_rhs = force_gimple_operand_gsi (&seq_gsi,
2025 gimple_assign_rhs1 (stmt),
2026 true, NULL, false,
2027 GSI_CONTINUE_LINKING);
2028 gimple_assign_set_rhs1 (stmt, new_rhs);
2029 id->regimplify = false;
2032 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2034 if (id->regimplify)
2035 gimple_regimplify_operands (stmt, &seq_gsi);
2037 stmt_added = true;
2040 if (!stmt_added)
2041 continue;
2043 /* If copy_basic_block was empty at the start of this iteration,
2044 call gsi_start_bb again to get at the newly added statements. */
2045 if (gsi_end_p (copy_gsi))
2046 copy_gsi = gsi_start_bb (copy_basic_block);
2047 else
2048 gsi_next (&copy_gsi);
2050 /* Process the new statement. The call to gimple_regimplify_operands
2051 possibly turned the statement into multiple statements; we
2052 need to process all of them. */
2055 tree fn;
2056 gcall *call_stmt;
2058 stmt = gsi_stmt (copy_gsi);
2059 call_stmt = dyn_cast <gcall *> (stmt);
2060 if (call_stmt
2061 && gimple_call_va_arg_pack_p (call_stmt)
2062 && id->call_stmt
2063 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2065 /* __builtin_va_arg_pack () should be replaced by
2066 all arguments corresponding to ... in the caller. */
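/* A minimal user-level sketch (hypothetical code) that reaches this
   path, in the style documented for __builtin_va_arg_pack:

     static inline __attribute__ ((always_inline)) int
     my_warn (const char *fmt, ...)
     {
       return fprintf (stderr, fmt, __builtin_va_arg_pack ());
     }

   Inlining 'my_warn ("%s: %d\n", name, n)' rewrites the fprintf call
   so the builtin is replaced by the caller's anonymous arguments
   'name' and 'n', which is what the argument-array surgery below
   implements.  */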
2067 tree p;
2068 gcall *new_call;
2069 vec<tree> argarray;
2070 size_t nargs = gimple_call_num_args (id->call_stmt);
2071 size_t n;
2073 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2074 nargs--;
2076 /* Create the new array of arguments. */
2077 n = nargs + gimple_call_num_args (call_stmt);
2078 argarray.create (n);
2079 argarray.safe_grow_cleared (n);
2081 /* Copy all the arguments before '...' */
2082 memcpy (argarray.address (),
2083 gimple_call_arg_ptr (call_stmt, 0),
2084 gimple_call_num_args (call_stmt) * sizeof (tree));
2086 /* Append the arguments passed in '...' */
2087 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2088 gimple_call_arg_ptr (id->call_stmt, 0)
2089 + (gimple_call_num_args (id->call_stmt) - nargs),
2090 nargs * sizeof (tree));
2092 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2093 argarray);
2095 argarray.release ();
2097 /* Copy all GIMPLE_CALL flags, location and block, except
2098 GF_CALL_VA_ARG_PACK. */
2099 gimple_call_copy_flags (new_call, call_stmt);
2100 gimple_call_set_va_arg_pack (new_call, false);
2101 /* Location includes block. */
2102 gimple_set_location (new_call, gimple_location (stmt));
2103 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2105 gsi_replace (&copy_gsi, new_call, false);
2106 stmt = new_call;
2108 else if (call_stmt
2109 && id->call_stmt
2110 && (decl = gimple_call_fndecl (stmt))
2111 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2113 /* __builtin_va_arg_pack_len () should be replaced by
2114 the number of anonymous arguments. */
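/* For instance (hypothetical code):

     static inline __attribute__ ((always_inline)) int
     nrest (int first, ...)
     {
       return __builtin_va_arg_pack_len ();
     }

   inlining 'nrest (1, 2, 3)' substitutes the constant 2, computed
   below as the call's argument count minus the number of named
   parameters of the callee.  */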
2115 size_t nargs = gimple_call_num_args (id->call_stmt);
2116 tree count, p;
2117 gimple *new_stmt;
2119 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2120 nargs--;
2122 if (!gimple_call_lhs (stmt))
2124 /* Drop unused calls. */
2125 gsi_remove (&copy_gsi, false);
2126 continue;
2128 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2130 count = build_int_cst (integer_type_node, nargs);
2131 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2132 gsi_replace (&copy_gsi, new_stmt, false);
2133 stmt = new_stmt;
2135 else if (nargs != 0)
2137 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2138 count = build_int_cst (integer_type_node, nargs);
2139 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2140 PLUS_EXPR, newlhs, count);
2141 gimple_call_set_lhs (stmt, newlhs);
2142 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2145 else if (call_stmt
2146 && id->call_stmt
2147 && gimple_call_internal_p (stmt)
2148 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2150 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2151 gsi_remove (&copy_gsi, false);
2152 continue;
2155 /* Statements produced by inlining can be unfolded, especially
2156 when we have constant propagated some operands. We can't fold
2157 them right now for two reasons:
2158 1) folding requires SSA_NAME_DEF_STMTs to be correct
2159 2) we can't change function calls to builtins.
2160 So we just mark the statement for later folding. We mark
2161 all new statements, instead of just statements that have changed
2162 by some nontrivial substitution, so even statements made
2163 foldable indirectly are updated. If this turns out to be
2164 expensive, copy_body can be told to watch for nontrivial
2165 changes. */
2166 if (id->statements_to_fold)
2167 id->statements_to_fold->add (stmt);
2169 /* We're duplicating a CALL_EXPR. Find any corresponding
2170 callgraph edges and update or duplicate them. */
2171 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2173 struct cgraph_edge *edge;
2175 switch (id->transform_call_graph_edges)
2177 case CB_CGE_DUPLICATE:
2178 edge = id->src_node->get_edge (orig_stmt);
2179 if (edge)
2181 struct cgraph_edge *old_edge = edge;
2182 profile_count old_cnt = edge->count;
2183 edge = edge->clone (id->dst_node, call_stmt,
2184 gimple_uid (stmt),
2185 num, den,
2186 true);
2188 /* Speculative calls consist of two edges - direct and
2189 indirect. Duplicate the whole thing and distribute
2190 frequencies accordingly. */
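/* Illustrative numbers: if the original direct edge had count 80 and
   the indirect one count 20, then prob = 20 / (80 + 20) = 20%, so the
   copied block's count is split 80% / 20% between the two cloned
   edges, the indirect clone taking the apply_probability share and
   the direct clone the remainder.  */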
2191 if (edge->speculative)
2193 struct cgraph_edge *direct, *indirect;
2194 struct ipa_ref *ref;
2196 gcc_assert (!edge->indirect_unknown_callee);
2197 old_edge->speculative_call_info (direct, indirect, ref);
2199 profile_count indir_cnt = indirect->count;
2200 indirect = indirect->clone (id->dst_node, call_stmt,
2201 gimple_uid (stmt),
2202 num, den,
2203 true);
2205 profile_probability prob
2206 = indir_cnt.probability_in (old_cnt + indir_cnt);
2207 indirect->count
2208 = copy_basic_block->count.apply_probability (prob);
2209 edge->count = copy_basic_block->count - indirect->count;
2210 id->dst_node->clone_reference (ref, stmt);
2212 else
2213 edge->count = copy_basic_block->count;
2215 break;
2217 case CB_CGE_MOVE_CLONES:
2218 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2219 call_stmt);
2220 edge = id->dst_node->get_edge (stmt);
2221 break;
2223 case CB_CGE_MOVE:
2224 edge = id->dst_node->get_edge (orig_stmt);
2225 if (edge)
2226 edge->set_call_stmt (call_stmt);
2227 break;
2229 default:
2230 gcc_unreachable ();
2233 /* Constant propagation on arguments done during inlining
2234 may create a new direct call. Produce an edge for it. */
2235 if ((!edge
2236 || (edge->indirect_inlining_edge
2237 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2238 && id->dst_node->definition
2239 && (fn = gimple_call_fndecl (stmt)) != NULL)
2241 struct cgraph_node *dest = cgraph_node::get_create (fn);
2243 /* We have a missing edge in the callgraph. This can happen
2244 when previous inlining turned an indirect call into a
2245 direct call by constant propagating arguments, or when we are
2246 producing a dead clone (for further cloning). In all
2247 other cases we hit a bug (incorrect node sharing is the
2248 most common reason for missing edges). */
2249 gcc_assert (!dest->definition
2250 || dest->address_taken
2251 || !id->src_node->definition
2252 || !id->dst_node->definition);
2253 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2254 id->dst_node->create_edge_including_clones
2255 (dest, orig_stmt, call_stmt, bb->count,
2256 CIF_ORIGINALLY_INDIRECT_CALL);
2257 else
2258 id->dst_node->create_edge (dest, call_stmt,
2259 bb->count)->inline_failed
2260 = CIF_ORIGINALLY_INDIRECT_CALL;
2261 if (dump_file)
2263 fprintf (dump_file, "Created new direct edge to %s\n",
2264 dest->name ());
2268 notice_special_calls (as_a <gcall *> (stmt));
2271 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2272 id->eh_map, id->eh_lp_nr);
2274 gsi_next (&copy_gsi);
2276 while (!gsi_end_p (copy_gsi));
2278 copy_gsi = gsi_last_bb (copy_basic_block);
2281 return copy_basic_block;
2284 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
2285 SSA form is quite easy, since the dominator relationship for the old
2286 basic blocks does not change.
2288 There is however an exception: inlining might change the dominator
2289 relation across EH edges leading from basic blocks within the inlined
2290 function to landing pads in the function we inline into.
2292 The function fills in PHI_RESULTs of such PHI nodes if they refer
2293 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2294 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2295 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2296 set, which means that there will be no overlapping live ranges
2297 for the underlying symbol.
2299 This might change in the future if we allow redirecting of EH edges;
2300 we might then want to change the way we build the CFG pre-inlining to
2301 include all the possible edges. */
2302 static void
2303 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2304 bool can_throw, bool nonlocal_goto)
2306 edge e;
2307 edge_iterator ei;
2309 FOR_EACH_EDGE (e, ei, bb->succs)
2310 if (!e->dest->aux
2311 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2313 gphi *phi;
2314 gphi_iterator si;
2316 if (!nonlocal_goto)
2317 gcc_assert (e->flags & EDGE_EH);
2319 if (!can_throw)
2320 gcc_assert (!(e->flags & EDGE_EH));
2322 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2324 edge re;
2326 phi = si.phi ();
2328 /* For abnormal goto/call edges the receiver can be the
2329 ENTRY_BLOCK. Do not assert this cannot happen. */
2331 gcc_assert ((e->flags & EDGE_EH)
2332 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2334 re = find_edge (ret_bb, e->dest);
2335 gcc_checking_assert (re);
2336 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2337 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2339 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2340 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2345 /* Insert clobbers for automatic variables of the inlined ID->src_fn
2346 function at the start of basic block ID->eh_landing_pad_dest. */
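/* Sketch of the effect (hypothetical callee): if the inlined function
   has a local aggregate

     struct S buf;

   that is live across a statement which can throw, the landing pad in
   the caller gains

     buf ={v} {CLOBBER};

   so later passes know buf's lifetime ended once the exception path
   was taken.  */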
2348 static void
2349 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2351 tree var;
2352 basic_block bb = id->eh_landing_pad_dest;
2353 live_vars_map *vars = NULL;
2354 unsigned int cnt = 0;
2355 unsigned int i;
2356 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2357 if (VAR_P (var)
2358 && !DECL_HARD_REGISTER (var)
2359 && !TREE_THIS_VOLATILE (var)
2360 && !DECL_HAS_VALUE_EXPR_P (var)
2361 && !is_gimple_reg (var)
2362 && auto_var_in_fn_p (var, id->src_fn)
2363 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2365 tree *t = id->decl_map->get (var);
2366 if (!t)
2367 continue;
2368 tree new_var = *t;
2369 if (VAR_P (new_var)
2370 && !DECL_HARD_REGISTER (new_var)
2371 && !TREE_THIS_VOLATILE (new_var)
2372 && !DECL_HAS_VALUE_EXPR_P (new_var)
2373 && !is_gimple_reg (new_var)
2374 && auto_var_in_fn_p (new_var, id->dst_fn))
2376 if (vars == NULL)
2377 vars = new live_vars_map;
2378 vars->put (DECL_UID (var), cnt++);
2381 if (vars == NULL)
2382 return;
2384 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2385 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2386 if (VAR_P (var))
2388 edge e;
2389 edge_iterator ei;
2390 bool needed = false;
2391 unsigned int *v = vars->get (DECL_UID (var));
2392 if (v == NULL)
2393 continue;
2394 FOR_EACH_EDGE (e, ei, bb->preds)
2395 if ((e->flags & EDGE_EH) != 0
2396 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2398 basic_block src_bb = (basic_block) e->src->aux;
2400 if (bitmap_bit_p (&live[src_bb->index], *v))
2402 needed = true;
2403 break;
2406 if (needed)
2408 tree new_var = *id->decl_map->get (var);
2409 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2410 tree clobber = build_clobber (TREE_TYPE (new_var));
2411 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2412 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2415 destroy_live_vars (live);
2416 delete vars;
2419 /* Copy edges from BB into its copy constructed earlier, scale profile
2420 accordingly. Edges will be taken care of later. Assume aux
2421 pointers point to the copies of each BB. Return true if any
2422 debug stmts are left after a statement that must end the basic block. */
2424 static bool
2425 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2426 basic_block ret_bb, basic_block abnormal_goto_dest,
2427 copy_body_data *id)
2429 basic_block new_bb = (basic_block) bb->aux;
2430 edge_iterator ei;
2431 edge old_edge;
2432 gimple_stmt_iterator si;
2433 bool need_debug_cleanup = false;
2435 /* Use the indices from the original blocks to create edges for the
2436 new ones. */
2437 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2438 if (!(old_edge->flags & EDGE_EH))
2440 edge new_edge;
2441 int flags = old_edge->flags;
2442 location_t locus = old_edge->goto_locus;
2444 /* Return edges do get a FALLTHRU flag when they get inlined. */
2445 if (old_edge->dest->index == EXIT_BLOCK
2446 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2447 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2448 flags |= EDGE_FALLTHRU;
2450 new_edge
2451 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2452 new_edge->probability = old_edge->probability;
2453 if (!id->reset_location)
2454 new_edge->goto_locus = remap_location (locus, id);
2457 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2458 return false;
2460 /* When doing function splitting, we must decrease the count of the return
2461 block, which was previously reachable from blocks we did not copy. */
2462 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2463 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2464 if (old_edge->src->index != ENTRY_BLOCK
2465 && !old_edge->src->aux)
2466 new_bb->count -= old_edge->count ().apply_scale (num, den);
2468 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2470 gimple *copy_stmt;
2471 bool can_throw, nonlocal_goto;
2473 copy_stmt = gsi_stmt (si);
2474 if (!is_gimple_debug (copy_stmt))
2475 update_stmt (copy_stmt);
2477 /* Do this before the possible split_block. */
2478 gsi_next (&si);
2480 /* If this tree could throw an exception, there are two
2481 cases where we need to add abnormal edge(s): the
2482 tree wasn't in a region and there is a "current
2483 region" in the caller; or the original tree had
2484 EH edges. In both cases split the block after the tree,
2485 and add abnormal edge(s) as needed; we need both
2486 those from the callee and the caller.
2487 We check whether the copy can throw, because the const
2488 propagation can change an INDIRECT_REF which throws
2489 into a COMPONENT_REF which doesn't. If the copy
2490 can throw, the original could also throw. */
2491 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2492 nonlocal_goto
2493 = (stmt_can_make_abnormal_goto (copy_stmt)
2494 && !computed_goto_p (copy_stmt));
2496 if (can_throw || nonlocal_goto)
2498 if (!gsi_end_p (si))
2500 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2501 gsi_next (&si);
2502 if (gsi_end_p (si))
2503 need_debug_cleanup = true;
2505 if (!gsi_end_p (si))
2506 /* Note that bb's predecessor edges aren't necessarily
2507 right at this point; split_block doesn't care. */
2509 edge e = split_block (new_bb, copy_stmt);
2511 new_bb = e->dest;
2512 new_bb->aux = e->src->aux;
2513 si = gsi_start_bb (new_bb);
2517 bool update_probs = false;
2519 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2521 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2522 update_probs = true;
2524 else if (can_throw)
2526 make_eh_edges (copy_stmt);
2527 update_probs = true;
2530 /* EH edges may not match old edges. Copy as much as possible. */
2531 if (update_probs)
2533 edge e;
2534 edge_iterator ei;
2535 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2537 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2538 if ((old_edge->flags & EDGE_EH)
2539 && (e = find_edge (copy_stmt_bb,
2540 (basic_block) old_edge->dest->aux))
2541 && (e->flags & EDGE_EH))
2542 e->probability = old_edge->probability;
2544 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2545 if (e->flags & EDGE_EH)
2547 if (!e->probability.initialized_p ())
2548 e->probability = profile_probability::never ();
2549 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2551 if (id->eh_landing_pad_dest == NULL)
2552 id->eh_landing_pad_dest = e->dest;
2553 else
2554 gcc_assert (id->eh_landing_pad_dest == e->dest);
2560 /* If the call we inline cannot make an abnormal goto, do not add
2561 additional abnormal edges but only retain those already present
2562 in the original function body. */
2563 if (abnormal_goto_dest == NULL)
2564 nonlocal_goto = false;
2565 if (nonlocal_goto)
2567 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2569 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2570 nonlocal_goto = false;
2571 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2572 in OpenMP regions which aren't allowed to be left abnormally.
2573 So, no need to add abnormal edge in that case. */
2574 else if (is_gimple_call (copy_stmt)
2575 && gimple_call_internal_p (copy_stmt)
2576 && (gimple_call_internal_fn (copy_stmt)
2577 == IFN_ABNORMAL_DISPATCHER)
2578 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2579 nonlocal_goto = false;
2580 else
2581 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2582 EDGE_ABNORMAL);
2585 if ((can_throw || nonlocal_goto)
2586 && gimple_in_ssa_p (cfun))
2587 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2588 can_throw, nonlocal_goto);
2590 return need_debug_cleanup;
2593 /* Copy the PHIs. All blocks and edges are copied, some blocks
2594 were possibly split and new outgoing EH edges inserted.
2595 BB points to the block of the original function and AUX pointers
2596 link the original and newly copied blocks. */
2598 static void
2599 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2601 basic_block const new_bb = (basic_block) bb->aux;
2602 edge_iterator ei;
2603 gphi *phi;
2604 gphi_iterator si;
2605 edge new_edge;
2606 bool inserted = false;
2608 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2610 tree res, new_res;
2611 gphi *new_phi;
2613 phi = si.phi ();
2614 res = PHI_RESULT (phi);
2615 new_res = res;
2616 if (!virtual_operand_p (res))
2618 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2619 if (EDGE_COUNT (new_bb->preds) == 0)
2621 /* Technically we'd want an SSA_DEFAULT_DEF here... */
2622 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2624 else
2626 new_phi = create_phi_node (new_res, new_bb);
2627 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2629 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2630 bb);
2631 tree arg;
2632 tree new_arg;
2633 edge_iterator ei2;
2634 location_t locus;
2636 /* When doing partial cloning, we allow PHIs on the entry
2637 block as long as all the arguments are the same.
2638 Find any input edge to see which argument to copy. */
2639 if (!old_edge)
2640 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2641 if (!old_edge->src->aux)
2642 break;
2644 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2645 new_arg = arg;
2646 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2647 gcc_assert (new_arg);
2648 /* With return slot optimization we can end up with
2649 non-gimple (foo *)&this->m; fix that here. */
2650 if (TREE_CODE (new_arg) != SSA_NAME
2651 && TREE_CODE (new_arg) != FUNCTION_DECL
2652 && !is_gimple_val (new_arg))
2654 gimple_seq stmts = NULL;
2655 new_arg = force_gimple_operand (new_arg, &stmts, true,
2656 NULL);
2657 gsi_insert_seq_on_edge (new_edge, stmts);
2658 inserted = true;
2660 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2661 if (id->reset_location)
2662 locus = input_location;
2663 else
2664 locus = remap_location (locus, id);
2665 add_phi_arg (new_phi, new_arg, new_edge, locus);
2671 /* Commit the delayed edge insertions. */
2672 if (inserted)
2673 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2674 gsi_commit_one_edge_insert (new_edge, NULL);
2678 /* Wrapper for remap_decl so it can be used as a callback. */
2680 static tree
2681 remap_decl_1 (tree decl, void *data)
2683 return remap_decl (decl, (copy_body_data *) data);
2686 /* Build struct function and associated datastructures for the new clone
2687 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function
2688 changes cfun to the function of NEW_FNDECL (and current_function_decl too). */
2690 static void
2691 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2693 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2695 if (!DECL_ARGUMENTS (new_fndecl))
2696 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2697 if (!DECL_RESULT (new_fndecl))
2698 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2700 /* Register specific tree functions. */
2701 gimple_register_cfg_hooks ();
2703 /* Get clean struct function. */
2704 push_struct_function (new_fndecl);
2706 /* We will rebuild these, so just sanity check that they are empty. */
2707 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2708 gcc_assert (cfun->local_decls == NULL);
2709 gcc_assert (cfun->cfg == NULL);
2710 gcc_assert (cfun->decl == new_fndecl);
2712 /* Copy items we preserve during cloning. */
2713 cfun->static_chain_decl = src_cfun->static_chain_decl;
2714 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2715 cfun->function_end_locus = src_cfun->function_end_locus;
2716 cfun->curr_properties = src_cfun->curr_properties;
2717 cfun->last_verified = src_cfun->last_verified;
2718 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2719 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2720 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2721 cfun->calls_eh_return = src_cfun->calls_eh_return;
2722 cfun->stdarg = src_cfun->stdarg;
2723 cfun->after_inlining = src_cfun->after_inlining;
2724 cfun->can_throw_non_call_exceptions
2725 = src_cfun->can_throw_non_call_exceptions;
2726 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2727 cfun->returns_struct = src_cfun->returns_struct;
2728 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2730 init_empty_tree_cfg ();
2732 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2734 profile_count num = count;
2735 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2736 profile_count::adjust_for_ipa_scaling (&num, &den);
2738 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2739 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2740 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2741 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2742 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2743 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2744 if (src_cfun->eh)
2745 init_eh_for_function ();
2747 if (src_cfun->gimple_df)
2749 init_tree_ssa (cfun);
2750 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2751 if (cfun->gimple_df->in_ssa_p)
2752 init_ssa_operands (cfun);
2756 /* Helper function for copy_cfg_body. Move debug stmts from the end
2757 of NEW_BB to the beginning of successor basic blocks when needed. If a
2758 successor has multiple predecessors, reset the debug stmts' values;
2759 otherwise keep them. */
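/* For example (hypothetical GIMPLE): if NEW_BB ends in

     call foo ();     <-- can throw internally
     # DEBUG x => 42

   the debug bind trailing the throwing call is moved (on the last
   edge) or copied (on the other edges) to each successor; on a
   successor with several predecessors the bound value is reset, since
   a single value can no longer be asserted there.  */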
2761 static void
2762 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2764 edge e;
2765 edge_iterator ei;
2766 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2768 if (gsi_end_p (si)
2769 || gsi_one_before_end_p (si)
2770 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2771 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2772 return;
2774 FOR_EACH_EDGE (e, ei, new_bb->succs)
2776 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2777 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2778 while (is_gimple_debug (gsi_stmt (ssi)))
2780 gimple *stmt = gsi_stmt (ssi);
2781 gdebug *new_stmt;
2782 tree var;
2783 tree value;
2785 /* For the last edge move the debug stmts instead of copying
2786 them. */
2787 if (ei_one_before_end_p (ei))
2789 si = ssi;
2790 gsi_prev (&ssi);
2791 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2793 gimple_debug_bind_reset_value (stmt);
2794 gimple_set_location (stmt, UNKNOWN_LOCATION);
2796 gsi_remove (&si, false);
2797 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2798 continue;
2801 if (gimple_debug_bind_p (stmt))
2803 var = gimple_debug_bind_get_var (stmt);
2804 if (single_pred_p (e->dest))
2806 value = gimple_debug_bind_get_value (stmt);
2807 value = unshare_expr (value);
2808 new_stmt = gimple_build_debug_bind (var, value, stmt);
2810 else
2811 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2813 else if (gimple_debug_source_bind_p (stmt))
2815 var = gimple_debug_source_bind_get_var (stmt);
2816 value = gimple_debug_source_bind_get_value (stmt);
2817 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2819 else if (gimple_debug_nonbind_marker_p (stmt))
2820 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2821 else
2822 gcc_unreachable ();
2823 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2824 id->debug_stmts.safe_push (new_stmt);
2825 gsi_prev (&ssi);
2830 /* Make a copy of the sub-loops of SRC_PARENT and place them
2831 as sub-loops of DEST_PARENT. */
2833 static void
2834 copy_loops (copy_body_data *id,
2835 class loop *dest_parent, class loop *src_parent)
2837 class loop *src_loop = src_parent->inner;
2838 while (src_loop)
2840 if (!id->blocks_to_copy
2841 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2843 class loop *dest_loop = alloc_loop ();
2845 /* Assign the new loop its header and latch and associate
2846 those with the new loop. */
2847 dest_loop->header = (basic_block)src_loop->header->aux;
2848 dest_loop->header->loop_father = dest_loop;
2849 if (src_loop->latch != NULL)
2851 dest_loop->latch = (basic_block)src_loop->latch->aux;
2852 dest_loop->latch->loop_father = dest_loop;
2855 /* Copy loop meta-data. */
2856 copy_loop_info (src_loop, dest_loop);
2857 if (dest_loop->unroll)
2858 cfun->has_unroll = true;
2859 if (dest_loop->force_vectorize)
2860 cfun->has_force_vectorize_loops = true;
2861 if (id->src_cfun->last_clique != 0)
2862 dest_loop->owned_clique
2863 = remap_dependence_clique (id,
2864 src_loop->owned_clique
2865 ? src_loop->owned_clique : 1);
2867 /* Finally place it into the loop array and the loop tree. */
2868 place_new_loop (cfun, dest_loop);
2869 flow_loop_tree_node_add (dest_parent, dest_loop);
2871 if (src_loop->simduid)
2873 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2874 cfun->has_simduid_loops = true;
2877 /* Recurse. */
2878 copy_loops (id, dest_loop, src_loop);
2880 src_loop = src_loop->next;
2884 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2886 void
2887 redirect_all_calls (copy_body_data * id, basic_block bb)
2889 gimple_stmt_iterator si;
2890 gimple *last = last_stmt (bb);
2891 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2893 gimple *stmt = gsi_stmt (si);
2894 if (is_gimple_call (stmt))
2896 tree old_lhs = gimple_call_lhs (stmt);
2897 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2898 if (edge)
2900 gimple *new_stmt = edge->redirect_call_stmt_to_callee ();
2901 /* If the IPA-SRA transformation, run as part of edge redirection,
2902 removed the LHS because it is unused, save it to
2903 killed_new_ssa_names so that we can prune it from debug
2904 statements. */
2905 if (old_lhs
2906 && TREE_CODE (old_lhs) == SSA_NAME
2907 && !gimple_call_lhs (new_stmt))
2909 if (!id->killed_new_ssa_names)
2910 id->killed_new_ssa_names = new hash_set<tree> (16);
2911 id->killed_new_ssa_names->add (old_lhs);
2914 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2915 gimple_purge_dead_eh_edges (bb);
2921 /* Make a copy of the body of FN so that it can be inserted inline in
2922 another function. Walks FN via CFG, returns new fndecl. */
2924 static tree
2925 copy_cfg_body (copy_body_data * id,
2926 basic_block entry_block_map, basic_block exit_block_map,
2927 basic_block new_entry)
2929 tree callee_fndecl = id->src_fn;
2930 /* Original cfun for the callee, doesn't change. */
2931 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2932 struct function *cfun_to_copy;
2933 basic_block bb;
2934 tree new_fndecl = NULL;
2935 bool need_debug_cleanup = false;
2936 int last;
2937 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2938 profile_count num = entry_block_map->count;
2940 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2942 /* Register specific tree functions. */
2943 gimple_register_cfg_hooks ();
2945 /* If we are inlining just a region of the function, make sure to connect
2946 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can
2947 be part of a loop, we must compute the frequency and probability of
2948 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2949 probabilities of edges incoming from the nonduplicated region. */
2950 if (new_entry)
2952 edge e;
2953 edge_iterator ei;
2954 den = profile_count::zero ();
2956 FOR_EACH_EDGE (e, ei, new_entry->preds)
2957 if (!e->src->aux)
2958 den += e->count ();
2959 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2962 profile_count::adjust_for_ipa_scaling (&num, &den);
2964 /* Must have a CFG here at this point. */
2965 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2966 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2969 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2970 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2971 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2972 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2974 /* Duplicate any exception-handling regions. */
2975 if (cfun->eh)
2976 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2977 remap_decl_1, id);
2979 /* Use aux pointers to map the original blocks to their copies. */
2980 FOR_EACH_BB_FN (bb, cfun_to_copy)
2981 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2983 basic_block new_bb = copy_bb (id, bb, num, den);
2984 bb->aux = new_bb;
2985 new_bb->aux = bb;
2986 new_bb->loop_father = entry_block_map->loop_father;
2989 last = last_basic_block_for_fn (cfun);
2991 /* Now that we've duplicated the blocks, duplicate their edges. */
2992 basic_block abnormal_goto_dest = NULL;
2993 if (id->call_stmt
2994 && stmt_can_make_abnormal_goto (id->call_stmt))
2996 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2998 bb = gimple_bb (id->call_stmt);
2999 gsi_next (&gsi);
3000 if (gsi_end_p (gsi))
3001 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3003 FOR_ALL_BB_FN (bb, cfun_to_copy)
3004 if (!id->blocks_to_copy
3005 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3006 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3007 abnormal_goto_dest, id);
3009 if (id->eh_landing_pad_dest)
3011 add_clobbers_to_eh_landing_pad (id);
3012 id->eh_landing_pad_dest = NULL;
3015 if (new_entry)
3017 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3018 EDGE_FALLTHRU);
3019 e->probability = profile_probability::always ();
3022 /* Duplicate the loop tree, if available and wanted. */
3023 if (loops_for_fn (src_cfun) != NULL
3024 && current_loops != NULL)
3026 copy_loops (id, entry_block_map->loop_father,
3027 get_loop (src_cfun, 0));
3028 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
3029 loops_state_set (LOOPS_NEED_FIXUP);
3032 /* If the loop tree in the source function needed fixup, mark the
3033 destination loop tree for fixup, too. */
3034 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3035 loops_state_set (LOOPS_NEED_FIXUP);
3037 if (gimple_in_ssa_p (cfun))
3038 FOR_ALL_BB_FN (bb, cfun_to_copy)
3039 if (!id->blocks_to_copy
3040 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3041 copy_phis_for_bb (bb, id);
3043 FOR_ALL_BB_FN (bb, cfun_to_copy)
3044 if (bb->aux)
3046 if (need_debug_cleanup
3047 && bb->index != ENTRY_BLOCK
3048 && bb->index != EXIT_BLOCK)
3049 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3050 /* Update call edge destinations. This cannot be done before loop
3051 info is updated, because we may split basic blocks. */
3052 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3053 && bb->index != ENTRY_BLOCK
3054 && bb->index != EXIT_BLOCK)
3055 redirect_all_calls (id, (basic_block)bb->aux);
3056 ((basic_block)bb->aux)->aux = NULL;
3057 bb->aux = NULL;
3060 /* Zero out AUX fields of blocks newly created during EH edge
3061 insertion. */
3062 for (; last < last_basic_block_for_fn (cfun); last++)
3064 if (need_debug_cleanup)
3065 maybe_move_debug_stmts_to_successors (id,
3066 BASIC_BLOCK_FOR_FN (cfun, last));
3067 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3068 /* Update call edge destinations. This cannot be done before loop
3069 info is updated, because we may split basic blocks. */
3070 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3071 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3073 entry_block_map->aux = NULL;
3074 exit_block_map->aux = NULL;
3076 if (id->eh_map)
3078 delete id->eh_map;
3079 id->eh_map = NULL;
3081 if (id->dependence_map)
3083 delete id->dependence_map;
3084 id->dependence_map = NULL;
3087 return new_fndecl;
3090 /* Copy the debug STMT using ID. We deal with these statements in a
3091 special way: if any variable in their VALUE expression wasn't
3092 remapped yet, we won't remap it, because that would get decl uids
3093 out of sync, causing codegen differences between -g and -g0. If
3094 this arises, we drop the VALUE expression altogether. */
3096 static void
3097 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3099 tree t, *n;
3100 struct walk_stmt_info wi;
3102 if (tree block = gimple_block (stmt))
3104 n = id->decl_map->get (block);
3105 gimple_set_block (stmt, n ? *n : id->block);
3108 if (gimple_debug_nonbind_marker_p (stmt))
3109 return;
3111 /* Remap all the operands in COPY. */
3112 memset (&wi, 0, sizeof (wi));
3113 wi.info = id;
3115 processing_debug_stmt = 1;
3117 if (gimple_debug_source_bind_p (stmt))
3118 t = gimple_debug_source_bind_get_var (stmt);
3119 else if (gimple_debug_bind_p (stmt))
3120 t = gimple_debug_bind_get_var (stmt);
3121 else
3122 gcc_unreachable ();
3124 if (TREE_CODE (t) == PARM_DECL && id->debug_map
3125 && (n = id->debug_map->get (t)))
3127 gcc_assert (VAR_P (*n));
3128 t = *n;
3130 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3131 /* T is a non-localized variable. */;
3132 else
3133 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3135 if (gimple_debug_bind_p (stmt))
3137 gimple_debug_bind_set_var (stmt, t);
3139 if (gimple_debug_bind_has_value_p (stmt))
3140 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3141 remap_gimple_op_r, &wi, NULL);
3143 /* Punt if any decl couldn't be remapped. */
3144 if (processing_debug_stmt < 0)
3145 gimple_debug_bind_reset_value (stmt);
3147 else if (gimple_debug_source_bind_p (stmt))
3149 gimple_debug_source_bind_set_var (stmt, t);
3150 /* When inlining, and the source bind refers to one of the
3151 optimized-away parameters, change the source bind into a normal
3152 debug bind referring to the corresponding DEBUG_EXPR_DECL that
3153 should have been bound before the call stmt. */
3154 t = gimple_debug_source_bind_get_value (stmt);
3155 if (t != NULL_TREE
3156 && TREE_CODE (t) == PARM_DECL
3157 && id->call_stmt)
3159 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3160 unsigned int i;
3161 if (debug_args != NULL)
3163 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3164 if ((**debug_args)[i] == DECL_ORIGIN (t)
3165 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3167 t = (**debug_args)[i + 1];
3168 stmt->subcode = GIMPLE_DEBUG_BIND;
3169 gimple_debug_bind_set_value (stmt, t);
3170 break;
3174 if (gimple_debug_source_bind_p (stmt))
3175 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3176 remap_gimple_op_r, &wi, NULL);
3179 processing_debug_stmt = 0;
3181 update_stmt (stmt);
3184 /* Process deferred debug stmts. In order to give values better odds
3185 of being successfully remapped, we delay the processing of debug
3186 stmts until all other stmts that might require remapping are
3187 processed. */
3189 static void
3190 copy_debug_stmts (copy_body_data *id)
3192 size_t i;
3193 gdebug *stmt;
3195 if (!id->debug_stmts.exists ())
3196 return;
3198 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3199 copy_debug_stmt (stmt, id);
3201 id->debug_stmts.release ();
3204 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3205 another function. */
3207 static tree
3208 copy_tree_body (copy_body_data *id)
3210 tree fndecl = id->src_fn;
3211 tree body = DECL_SAVED_TREE (fndecl);
3213 walk_tree (&body, copy_tree_body_r, id, NULL);
3215 return body;
3218 /* Make a copy of the body of FN so that it can be inserted inline in
3219 another function. */
3221 static tree
3222 copy_body (copy_body_data *id,
3223 basic_block entry_block_map, basic_block exit_block_map,
3224 basic_block new_entry)
3226 tree fndecl = id->src_fn;
3227 tree body;
3229 /* If this body has a CFG, walk CFG and copy. */
3230 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3231 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3232 new_entry);
3233 copy_debug_stmts (id);
3234 delete id->killed_new_ssa_names;
3235 id->killed_new_ssa_names = NULL;
3237 return body;
3240 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3241 defined in function FN, or of a data member thereof. */
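/* Illustration (hypothetical source): when inlining a recursive call
   such as

     static inline int f (int *p, int n)
     {
       int local = n;
       return n ? f (&local, n - 1) : *p;
     }

   the actual argument '&local' is the address of an automatic variable
   of f itself, so setup_one_parameter must not propagate it as an
   invariant into the inlined body -- each recursion level has its own
   'local'.  */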
3243 static bool
3244 self_inlining_addr_expr (tree value, tree fn)
3246 tree var;
3248 if (TREE_CODE (value) != ADDR_EXPR)
3249 return false;
3251 var = get_base_address (TREE_OPERAND (value, 0));
3253 return var && auto_var_in_fn_p (var, fn);
3256 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3257 lexical block and line number information from BASE_STMT, if given,
3258 or from the last stmt of the block otherwise. */
3260 static gimple *
3261 insert_init_debug_bind (copy_body_data *id,
3262 basic_block bb, tree var, tree value,
3263 gimple *base_stmt)
3265 gimple *note;
3266 gimple_stmt_iterator gsi;
3267 tree tracked_var;
3269 if (!gimple_in_ssa_p (id->src_cfun))
3270 return NULL;
3272 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3273 return NULL;
3275 tracked_var = target_for_debug_bind (var);
3276 if (!tracked_var)
3277 return NULL;
3279 if (bb)
3281 gsi = gsi_last_bb (bb);
3282 if (!base_stmt && !gsi_end_p (gsi))
3283 base_stmt = gsi_stmt (gsi);
3286 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3288 if (bb)
3290 if (!gsi_end_p (gsi))
3291 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3292 else
3293 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3296 return note;
3299 static void
3300 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3302 /* If VAR represents a zero-sized variable, it's possible that the
3303 assignment statement may result in no gimple statements. */
3304 if (init_stmt)
3306 gimple_stmt_iterator si = gsi_last_bb (bb);
3308 /* We can end up with init statements that store to a non-register
3309 from a rhs with a conversion. Handle that here by forcing the
3310 rhs into a temporary. gimple_regimplify_operands is not
3311 prepared to do this for us. */
3312 if (!is_gimple_debug (init_stmt)
3313 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3314 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3315 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3317 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3318 gimple_expr_type (init_stmt),
3319 gimple_assign_rhs1 (init_stmt));
3320 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3321 GSI_NEW_STMT);
3322 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3323 gimple_assign_set_rhs1 (init_stmt, rhs);
3325 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3326 gimple_regimplify_operands (init_stmt, &si);
3328 if (!is_gimple_debug (init_stmt))
3330 tree def = gimple_assign_lhs (init_stmt);
3331 insert_init_debug_bind (id, bb, def, def, init_stmt);
3336 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3337 if need be (which should only be necessary for invalid programs). Attempt
3338 to convert VALUE to TYPE and return the result if that is possible; just
3339 return a zero constant of the given type if it fails. */
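/* A sketch of the fallback behavior (hypothetical invalid program):
   calling a function defined as 'int f (float x)' through a mismatched
   declaration 'int f ()' with an int argument fold_convert's the
   actual to float; a truly incompatible but same-sized register value
   is wrapped in a VIEW_CONVERT_EXPR, and anything else degrades to a
   zero constant of the parameter type.  */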
3341 tree
3342 force_value_to_type (tree type, tree value)
3344 /* If we can match up types by promotion/demotion do so. */
3345 if (fold_convertible_p (type, value))
3346 return fold_convert (type, value);
3348 /* ??? For valid programs we should not end up here.
3349 Still if we end up with truly mismatched types here, fall back
3350 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3351 GIMPLE to the following passes. */
3352 if (!is_gimple_reg_type (TREE_TYPE (value))
3353 || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3354 return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3355 else
3356 return build_zero_cst (type);
3359 /* Initialize parameter P with VALUE. If needed, produce the init statement
3360 at the end of BB. When BB is NULL, we return the init statement to be
3361 output later. */
3362 static gimple *
3363 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3364 basic_block bb, tree *vars)
3366 gimple *init_stmt = NULL;
3367 tree var;
3368 tree rhs = value;
3369 tree def = (gimple_in_ssa_p (cfun)
3370 ? ssa_default_def (id->src_cfun, p) : NULL);
3372 if (value
3373 && value != error_mark_node
3374 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3375 rhs = force_value_to_type (TREE_TYPE (p), value);
3377 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3378 here since the type of this decl must be visible to the calling
3379 function. */
3380 var = copy_decl_to_var (p, id);
3382 /* Declare this new variable. */
3383 DECL_CHAIN (var) = *vars;
3384 *vars = var;
3386 /* Make gimplifier happy about this variable. */
3387 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3389 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3390 we would not need to create a new variable here at all, if it
3391 weren't for debug info. Still, we can just use the argument
3392 value. */
3393 if (TREE_READONLY (p)
3394 && !TREE_ADDRESSABLE (p)
3395 && value && !TREE_SIDE_EFFECTS (value)
3396 && !def)
3398 /* We may produce non-gimple trees by adding NOPs or introduce
3399 invalid sharing when the operand is not really constant.
3400 It is not a big deal to prohibit constant propagation here as
3401 we will constant propagate in the DOM1 pass anyway. */
3402 if (is_gimple_min_invariant (value)
3403 && useless_type_conversion_p (TREE_TYPE (p),
3404 TREE_TYPE (value))
3405 /* We have to be very careful about ADDR_EXPR. Make sure
3406 the base variable isn't a local variable of the inlined
3407 function, e.g., when doing recursive inlining, direct or
3408 mutually-recursive or whatever, which is why we don't
3409 just test whether fn == current_function_decl. */
3410 && ! self_inlining_addr_expr (value, fn))
3412 insert_decl_map (id, p, value);
3413 insert_debug_decl_map (id, p, var);
3414 return insert_init_debug_bind (id, bb, var, value, NULL);
3418 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3419 that way, when the PARM_DECL is encountered, it will be
3420 automatically replaced by the VAR_DECL. */
3421 insert_decl_map (id, p, var);
3423 /* Even if P was TREE_READONLY, the new VAR should not be.
3424 In the original code, we would have constructed a
3425 temporary, and then the function body would have never
3426 changed the value of P. However, now, we will be
3427 constructing VAR directly. The constructor body may
3428 change its value multiple times as it is being
3429 constructed. Therefore, it must not be TREE_READONLY;
3430 the back-end assumes that TREE_READONLY variable is
3431 assigned to only once. */
3432 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3433 TREE_READONLY (var) = 0;
3435 /* If there is no setup required and we are in SSA, take the easy route
3436 replacing all SSA names representing the function parameter by the
3437 SSA name passed to the function.
3439 We need to construct a map for the variable anyway as it might be used
3440 in different SSA names when the parameter is set in the function.
3442 Do the replacement at -O0 for const arguments replaced by a constant.
3443 This is important for builtin_constant_p and other constructs requiring
3444 a constant argument to be visible in the inlined function body. */
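/* Sketch (hypothetical source) of the -O0 case described above:

     static inline __attribute__ ((always_inline)) int
     is_cst (const int x)
     {
       return __builtin_constant_p (x);
     }

   For a call 'is_cst (42)' the parameter's default SSA definition is
   mapped straight to 42, so the builtin can still fold to 1 inside
   the inlined body even without optimization.  */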
3445 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3446 && (optimize
3447 || (TREE_READONLY (p)
3448 && is_gimple_min_invariant (rhs)))
3449 && (TREE_CODE (rhs) == SSA_NAME
3450 || is_gimple_min_invariant (rhs))
3451 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3453 insert_decl_map (id, def, rhs);
3454 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3457 /* If the value of the argument is never used, don't bother initializing
3458 it. */
3459 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3461 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3462 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3465 /* Initialize this VAR_DECL from the equivalent argument. Convert
3466 the argument to the proper type in case it was promoted. */
3467 if (value)
3469 if (rhs == error_mark_node)
3471 insert_decl_map (id, p, var);
3472 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3475 STRIP_USELESS_TYPE_CONVERSION (rhs);
3477 /* If we are in SSA form, properly remap the default definition,
3478 or assign to a dummy SSA name if the parameter is unused and
3479 we are not optimizing. */
3480 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3482 if (def)
3484 def = remap_ssa_name (def, id);
3485 init_stmt = gimple_build_assign (def, rhs);
3486 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3487 set_ssa_default_def (cfun, var, NULL);
3489 else if (!optimize)
3491 def = make_ssa_name (var);
3492 init_stmt = gimple_build_assign (def, rhs);
3495 else
3496 init_stmt = gimple_build_assign (var, rhs);
3498 if (bb && init_stmt)
3499 insert_init_stmt (id, bb, init_stmt);
3501 return init_stmt;
3504 /* Generate code to initialize the parameters of the function at the
3505 top of the stack in ID from the GIMPLE_CALL STMT. */
3507 static void
3508 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3509 tree fn, basic_block bb)
3511 tree parms;
3512 size_t i;
3513 tree p;
3514 tree vars = NULL_TREE;
3515 tree static_chain = gimple_call_chain (stmt);
3517 /* Figure out what the parameters are. */
3518 parms = DECL_ARGUMENTS (fn);
3520 /* Loop through the parameter declarations, replacing each with an
3521 equivalent VAR_DECL, appropriately initialized. */
3522 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3524 tree val;
3525 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3526 setup_one_parameter (id, p, val, fn, bb, &vars);
3528 /* After remapping parameters remap their types. This has to be done
3529 in a second loop over all parameters to appropriately remap
3530 variable sized arrays when the size is specified in a
3531 parameter following the array. */
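/* A minimal example (hypothetical source) of why the second pass is
   needed:

     void g (int n, int (*a)[n]);

   The (variably modified) type of 'a' refers to 'n', so it can only be
   remapped after 'n' itself has been mapped in the first loop.  */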
3532 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3534 tree *varp = id->decl_map->get (p);
3535 if (varp && VAR_P (*varp))
3537 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3538 ? ssa_default_def (id->src_cfun, p) : NULL);
3539 tree var = *varp;
3540 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3541 /* Also remap the default definition if it was remapped
3542 to the default definition of the parameter replacement
3543 by the parameter setup. */
3544 if (def)
3546 tree *defp = id->decl_map->get (def);
3547 if (defp
3548 && TREE_CODE (*defp) == SSA_NAME
3549 && SSA_NAME_VAR (*defp) == var)
3550 TREE_TYPE (*defp) = TREE_TYPE (var);
3555 /* Initialize the static chain. */
3556 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3557 gcc_assert (fn != current_function_decl);
3558 if (p)
3560 /* No static chain? Seems like a bug in tree-nested.c. */
3561 gcc_assert (static_chain);
3563 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3566 declare_inline_vars (id->block, vars);
3570 /* Declare a return variable to replace the RESULT_DECL for the
3571 function we are calling. An appropriate DECL_STMT is returned.
3572 The USE_STMT is filled to contain a use of the declaration to
3573 indicate the return value of the function.
3575 RETURN_SLOT, if non-null, is the place where to store the result. It
3576 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3577 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3579 The return value is a (possibly null) value that holds the result
3580 as seen by the caller. */
3582 static tree
3583 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3584 basic_block entry_bb)
3586 tree callee = id->src_fn;
3587 tree result = DECL_RESULT (callee);
3588 tree callee_type = TREE_TYPE (result);
3589 tree caller_type;
3590 tree var, use;
3592 /* Handle type-mismatches in the function declaration return type
3593 vs. the call expression. */
3594 if (modify_dest)
3595 caller_type = TREE_TYPE (modify_dest);
3596 else
3597 caller_type = TREE_TYPE (TREE_TYPE (callee));
3599 /* We don't need to do anything for functions that don't return anything. */
3600 if (VOID_TYPE_P (callee_type))
3601 return NULL_TREE;
3603 /* If there was a return slot, then the return value is the
3604 dereferenced address of that object. */
3605 if (return_slot)
3607 /* The front end shouldn't have used both return_slot and
3608 a modify expression. */
3609 gcc_assert (!modify_dest);
3610 if (DECL_BY_REFERENCE (result))
3612 tree return_slot_addr = build_fold_addr_expr (return_slot);
3613 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3615 /* We are going to construct *&return_slot and we can't do that
3616 for variables believed not to be addressable.
3618 FIXME: This check can possibly match, because values returned
3619 via return slot optimization are not believed to have their
3620 address taken by alias analysis. */
3621 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3622 var = return_slot_addr;
3623 mark_addressable (return_slot);
3625 else
3627 var = return_slot;
3628 gcc_assert (TREE_CODE (var) != SSA_NAME);
3629 if (TREE_ADDRESSABLE (result))
3630 mark_addressable (var);
3632 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3633 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3634 && !DECL_GIMPLE_REG_P (result)
3635 && DECL_P (var))
3636 DECL_GIMPLE_REG_P (var) = 0;
3637 use = NULL;
3638 goto done;
3641 /* All types requiring non-trivial constructors should have been handled. */
3642 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3644 /* Attempt to avoid creating a new temporary variable. */
3645 if (modify_dest
3646 && TREE_CODE (modify_dest) != SSA_NAME)
3648 bool use_it = false;
3650 /* We can't use MODIFY_DEST if there's type promotion involved. */
3651 if (!useless_type_conversion_p (callee_type, caller_type))
3652 use_it = false;
3654 /* ??? If we're assigning to a variable sized type, then we must
3655 reuse the destination variable, because we've no good way to
3656 create variable sized temporaries at this point. */
3657 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3658 use_it = true;
3660 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3661 reuse it as the result of the call directly. Don't do this if
3662 it would promote MODIFY_DEST to addressable. */
3663 else if (TREE_ADDRESSABLE (result))
3664 use_it = false;
3665 else
3667 tree base_m = get_base_address (modify_dest);
3669 /* If the base isn't a decl, then it's a pointer, and we don't
3670 know where that's going to go. */
3671 if (!DECL_P (base_m))
3672 use_it = false;
3673 else if (is_global_var (base_m))
3674 use_it = false;
3675 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3676 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3677 && !DECL_GIMPLE_REG_P (result)
3678 && DECL_GIMPLE_REG_P (base_m))
3679 use_it = false;
3680 else if (!TREE_ADDRESSABLE (base_m))
3681 use_it = true;
3684 if (use_it)
3686 var = modify_dest;
3687 use = NULL;
3688 goto done;
3692 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3694 var = copy_result_decl_to_var (result, id);
3695 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3697 /* Do not have the rest of GCC warn about this variable as it should
3698 not be visible to the user. */
3699 TREE_NO_WARNING (var) = 1;
3701 declare_inline_vars (id->block, var);
3703 /* Build the use expr. If the return type of the function was
3704 promoted, convert it back to the expected type. */
3705 use = var;
3706 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3708 /* If we can match up types by promotion/demotion do so. */
3709 if (fold_convertible_p (caller_type, var))
3710 use = fold_convert (caller_type, var);
3711 else
3713 /* ??? For valid programs we should not end up here.
3714 Still if we end up with truly mismatched types here, fall back
3715 to using a MEM_REF to not leak invalid GIMPLE to the following
3716 passes. */
3717 /* Prevent var from being written into SSA form. */
3718 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3719 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3720 DECL_GIMPLE_REG_P (var) = false;
3721 else if (is_gimple_reg_type (TREE_TYPE (var)))
3722 TREE_ADDRESSABLE (var) = true;
3723 use = fold_build2 (MEM_REF, caller_type,
3724 build_fold_addr_expr (var),
3725 build_int_cst (ptr_type_node, 0));
3729 STRIP_USELESS_TYPE_CONVERSION (use);
3731 if (DECL_BY_REFERENCE (result))
3733 TREE_ADDRESSABLE (var) = 1;
3734 var = build_fold_addr_expr (var);
3737 done:
3738 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3739 way, when the RESULT_DECL is encountered, it will be
3740 automatically replaced by the VAR_DECL.
3742 When returning by reference, ensure that RESULT_DECL remaps to
3743 gimple_val. */
3744 if (DECL_BY_REFERENCE (result)
3745 && !is_gimple_val (var))
3747 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3748 insert_decl_map (id, result, temp);
3749 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3750 its default_def SSA_NAME. */
3751 if (gimple_in_ssa_p (id->src_cfun)
3752 && is_gimple_reg (result))
3754 temp = make_ssa_name (temp);
3755 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3757 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3759 else
3760 insert_decl_map (id, result, var);
3762 /* Remember this so we can ignore it in remap_decls. */
3763 id->retvar = var;
3764 return use;
3767 /* Determine if the function can be copied. If so return NULL. If
3768 not return a string describing the reason for failure. */
3770 const char *
3771 copy_forbidden (struct function *fun)
3773 const char *reason = fun->cannot_be_copied_reason;
3775 /* Only examine the function once. */
3776 if (fun->cannot_be_copied_set)
3777 return reason;
3779 /* We cannot copy a function that receives a non-local goto
3780 because we cannot remap the destination label used in the
3781 function that is performing the non-local goto. */
3782 /* ??? Actually, this should be possible, if we work at it.
3783 No doubt there's just a handful of places that simply
3784 assume it doesn't happen and don't substitute properly. */
3785 if (fun->has_nonlocal_label)
3787 reason = G_("function %q+F can never be copied "
3788 "because it receives a non-local goto");
3789 goto fail;
3792 if (fun->has_forced_label_in_static)
3794 reason = G_("function %q+F can never be copied because it saves "
3795 "address of local label in a static variable");
3796 goto fail;
3799 fail:
3800 fun->cannot_be_copied_reason = reason;
3801 fun->cannot_be_copied_set = true;
3802 return reason;
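/* A minimal GNU C sketch (hypothetical) of code that trips the
   non-local goto check above:

     void g (void (*) (void));
     void
     f (void)
     {
       __label__ l;
       void h (void) { goto l; }
       g (h);
       l:;
     }

   Here F receives a non-local goto from the nested function H, so F can
   never be copied, and hence never inlined, cloned or versioned. */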
3806 static const char *inline_forbidden_reason;
3808 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3809 iff a function cannot be inlined. Also sets the reason why. */
3811 static tree
3812 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3813 struct walk_stmt_info *wip)
3815 tree fn = (tree) wip->info;
3816 tree t;
3817 gimple *stmt = gsi_stmt (*gsi);
3819 switch (gimple_code (stmt))
3821 case GIMPLE_CALL:
3822 /* Refuse to inline an alloca call unless the user explicitly forced it,
3823 as this may change the program's memory overhead drastically when the
3824 function using alloca is called in a loop. In the GCC present in
3825 SPEC2000, inlining into schedule_block caused it to require 2GB of
3826 RAM instead of 256MB. Don't do so for alloca calls emitted for
3827 VLA objects as those can't cause unbounded growth (they're always
3828 wrapped inside stack_save/stack_restore regions). */
3829 if (gimple_maybe_alloca_call_p (stmt)
3830 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3831 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3833 inline_forbidden_reason
3834 = G_("function %q+F can never be inlined because it uses "
3835 "alloca (override using the always_inline attribute)");
3836 *handled_ops_p = true;
3837 return fn;
3840 t = gimple_call_fndecl (stmt);
3841 if (t == NULL_TREE)
3842 break;
3844 /* We cannot inline functions that call setjmp. */
3845 if (setjmp_call_p (t))
3847 inline_forbidden_reason
3848 = G_("function %q+F can never be inlined because it uses setjmp");
3849 *handled_ops_p = true;
3850 return t;
3853 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3854 switch (DECL_FUNCTION_CODE (t))
3856 /* We cannot inline functions that take a variable number of
3857 arguments. */
3858 case BUILT_IN_VA_START:
3859 case BUILT_IN_NEXT_ARG:
3860 case BUILT_IN_VA_END:
3861 inline_forbidden_reason
3862 = G_("function %q+F can never be inlined because it "
3863 "uses variable argument lists");
3864 *handled_ops_p = true;
3865 return t;
3867 case BUILT_IN_LONGJMP:
3868 /* We can't inline functions that call __builtin_longjmp at
3869 all. The non-local goto machinery really requires the
3870 destination be in a different function. If we allow the
3871 function calling __builtin_longjmp to be inlined into the
3872 function calling __builtin_setjmp, Things will Go Awry. */
3873 inline_forbidden_reason
3874 = G_("function %q+F can never be inlined because "
3875 "it uses setjmp-longjmp exception handling");
3876 *handled_ops_p = true;
3877 return t;
3879 case BUILT_IN_NONLOCAL_GOTO:
3880 /* Similarly. */
3881 inline_forbidden_reason
3882 = G_("function %q+F can never be inlined because "
3883 "it uses non-local goto");
3884 *handled_ops_p = true;
3885 return t;
3887 case BUILT_IN_RETURN:
3888 case BUILT_IN_APPLY_ARGS:
3889 /* If a __builtin_apply_args caller would be inlined,
3890 it would be saving arguments of the function it has
3891 been inlined into. Similarly __builtin_return would
3892 return from the function the inline has been inlined into. */
3893 inline_forbidden_reason
3894 = G_("function %q+F can never be inlined because "
3895 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3896 *handled_ops_p = true;
3897 return t;
3899 default:
3900 break;
3902 break;
3904 case GIMPLE_GOTO:
3905 t = gimple_goto_dest (stmt);
3907 /* We will not inline a function which uses computed goto. The
3908 addresses of its local labels, which may be tucked into
3909 global storage, are of course not constant across
3910 instantiations, which causes unexpected behavior. */
3911 if (TREE_CODE (t) != LABEL_DECL)
3913 inline_forbidden_reason
3914 = G_("function %q+F can never be inlined "
3915 "because it contains a computed goto");
3916 *handled_ops_p = true;
3917 return t;
3919 break;
3921 default:
3922 break;
3925 *handled_ops_p = false;
3926 return NULL_TREE;
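/* Illustrative sketches of functions the walker above refuses to inline
   (assuming no always_inline override); 'use' and 'env' are placeholder
   names for the example:

     void f1 (int n) { char *p = alloca (n); use (p); }  // alloca
     int f2 (void) { return setjmp (env); }              // setjmp
     void f3 (void *p) { goto *p; }                      // computed goto */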
3929 /* Return true if FNDECL is a function that cannot be inlined into
3930 another one. */
3932 static bool
3933 inline_forbidden_p (tree fndecl)
3935 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3936 struct walk_stmt_info wi;
3937 basic_block bb;
3938 bool forbidden_p = false;
3940 /* First check for shared reasons not to copy the code. */
3941 inline_forbidden_reason = copy_forbidden (fun);
3942 if (inline_forbidden_reason != NULL)
3943 return true;
3945 /* Next, walk the statements of the function looking for
3946 constructs we can't handle, or that are non-optimal for inlining. */
3947 hash_set<tree> visited_nodes;
3948 memset (&wi, 0, sizeof (wi));
3949 wi.info = (void *) fndecl;
3950 wi.pset = &visited_nodes;
3952 FOR_EACH_BB_FN (bb, fun)
3954 gimple *ret;
3955 gimple_seq seq = bb_seq (bb);
3956 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3957 forbidden_p = (ret != NULL);
3958 if (forbidden_p)
3959 break;
3962 return forbidden_p;
3965 /* Return false if the function FNDECL cannot be inlined on account of its
3966 attributes, true otherwise. */
3967 static bool
3968 function_attribute_inlinable_p (const_tree fndecl)
3970 if (targetm.attribute_table)
3972 const_tree a;
3974 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3976 const_tree name = get_attribute_name (a);
3977 int i;
3979 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3980 if (is_attribute_p (targetm.attribute_table[i].name, name))
3981 return targetm.function_attribute_inlinable_p (fndecl);
3985 return true;
3988 /* Returns nonzero if FN is a function that does not have any
3989 fundamental inline blocking properties. */
3991 bool
3992 tree_inlinable_function_p (tree fn)
3994 bool inlinable = true;
3995 bool do_warning;
3996 tree always_inline;
3998 /* If we've already decided this function shouldn't be inlined,
3999 there's no need to check again. */
4000 if (DECL_UNINLINABLE (fn))
4001 return false;
4003 /* We only warn for functions declared `inline' by the user. */
4004 do_warning = (warn_inline
4005 && DECL_DECLARED_INLINE_P (fn)
4006 && !DECL_NO_INLINE_WARNING_P (fn)
4007 && !DECL_IN_SYSTEM_HEADER (fn));
4009 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4011 if (flag_no_inline
4012 && always_inline == NULL)
4014 if (do_warning)
4015 warning (OPT_Winline, "function %q+F can never be inlined because it "
4016 "is suppressed using %<-fno-inline%>", fn);
4017 inlinable = false;
4020 else if (!function_attribute_inlinable_p (fn))
4022 if (do_warning)
4023 warning (OPT_Winline, "function %q+F can never be inlined because it "
4024 "uses attributes conflicting with inlining", fn);
4025 inlinable = false;
4028 else if (inline_forbidden_p (fn))
4030 /* See if we should warn about uninlinable functions. Previously,
4031 some of these warnings would be issued while trying to expand
4032 the function inline, but that would cause multiple warnings
4033 about functions that would for example call alloca. But since
4034 this is a property of the function, just one warning is enough.
4035 As a bonus we can now give more details about the reason why a
4036 function is not inlinable. */
4037 if (always_inline)
4038 error (inline_forbidden_reason, fn);
4039 else if (do_warning)
4040 warning (OPT_Winline, inline_forbidden_reason, fn);
4042 inlinable = false;
4045 /* Squirrel away the result so that we don't have to check again. */
4046 DECL_UNINLINABLE (fn) = !inlinable;
4048 return inlinable;
4051 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
4052 word size, take a possible memcpy call into account, and return the
4053 cost based on whether we optimize for size or speed according to SPEED_P. */
4056 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4058 HOST_WIDE_INT size;
4060 gcc_assert (!VOID_TYPE_P (type));
4062 if (TREE_CODE (type) == VECTOR_TYPE)
4064 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4065 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4066 int orig_mode_size
4067 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4068 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4069 return ((orig_mode_size + simd_mode_size - 1)
4070 / simd_mode_size);
4073 size = int_size_in_bytes (type);
4075 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4076 /* Cost of a memcpy call, 3 arguments and the call. */
4077 return 4;
4078 else
4079 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
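/* Worked example (a hypothetical target where MOVE_MAX_PIECES is 8 and
   MOVE_RATIO (speed_p) is 4): a 16-byte struct costs
   (16 + 8 - 1) / 8 == 2 moves, while a 64-byte struct exceeds the
   8 * 4 == 32 byte limit and is costed as a memcpy call, i.e. 4. */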
4082 /* Returns cost of operation CODE, according to WEIGHTS */
4084 static int
4085 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4086 tree op1 ATTRIBUTE_UNUSED, tree op2)
4088 switch (code)
4090 /* These are "free" conversions, or their presumed cost
4091 is folded into other operations. */
4092 case RANGE_EXPR:
4093 CASE_CONVERT:
4094 case COMPLEX_EXPR:
4095 case PAREN_EXPR:
4096 case VIEW_CONVERT_EXPR:
4097 return 0;
4099 /* Assign cost of 1 to usual operations.
4100 ??? We may consider mapping RTL costs to this. */
4101 case COND_EXPR:
4102 case VEC_COND_EXPR:
4103 case VEC_PERM_EXPR:
4105 case PLUS_EXPR:
4106 case POINTER_PLUS_EXPR:
4107 case POINTER_DIFF_EXPR:
4108 case MINUS_EXPR:
4109 case MULT_EXPR:
4110 case MULT_HIGHPART_EXPR:
4112 case ADDR_SPACE_CONVERT_EXPR:
4113 case FIXED_CONVERT_EXPR:
4114 case FIX_TRUNC_EXPR:
4116 case NEGATE_EXPR:
4117 case FLOAT_EXPR:
4118 case MIN_EXPR:
4119 case MAX_EXPR:
4120 case ABS_EXPR:
4121 case ABSU_EXPR:
4123 case LSHIFT_EXPR:
4124 case RSHIFT_EXPR:
4125 case LROTATE_EXPR:
4126 case RROTATE_EXPR:
4128 case BIT_IOR_EXPR:
4129 case BIT_XOR_EXPR:
4130 case BIT_AND_EXPR:
4131 case BIT_NOT_EXPR:
4133 case TRUTH_ANDIF_EXPR:
4134 case TRUTH_ORIF_EXPR:
4135 case TRUTH_AND_EXPR:
4136 case TRUTH_OR_EXPR:
4137 case TRUTH_XOR_EXPR:
4138 case TRUTH_NOT_EXPR:
4140 case LT_EXPR:
4141 case LE_EXPR:
4142 case GT_EXPR:
4143 case GE_EXPR:
4144 case EQ_EXPR:
4145 case NE_EXPR:
4146 case ORDERED_EXPR:
4147 case UNORDERED_EXPR:
4149 case UNLT_EXPR:
4150 case UNLE_EXPR:
4151 case UNGT_EXPR:
4152 case UNGE_EXPR:
4153 case UNEQ_EXPR:
4154 case LTGT_EXPR:
4156 case CONJ_EXPR:
4158 case PREDECREMENT_EXPR:
4159 case PREINCREMENT_EXPR:
4160 case POSTDECREMENT_EXPR:
4161 case POSTINCREMENT_EXPR:
4163 case REALIGN_LOAD_EXPR:
4165 case WIDEN_SUM_EXPR:
4166 case WIDEN_MULT_EXPR:
4167 case DOT_PROD_EXPR:
4168 case SAD_EXPR:
4169 case WIDEN_MULT_PLUS_EXPR:
4170 case WIDEN_MULT_MINUS_EXPR:
4171 case WIDEN_LSHIFT_EXPR:
4173 case VEC_WIDEN_MULT_HI_EXPR:
4174 case VEC_WIDEN_MULT_LO_EXPR:
4175 case VEC_WIDEN_MULT_EVEN_EXPR:
4176 case VEC_WIDEN_MULT_ODD_EXPR:
4177 case VEC_UNPACK_HI_EXPR:
4178 case VEC_UNPACK_LO_EXPR:
4179 case VEC_UNPACK_FLOAT_HI_EXPR:
4180 case VEC_UNPACK_FLOAT_LO_EXPR:
4181 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4182 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4183 case VEC_PACK_TRUNC_EXPR:
4184 case VEC_PACK_SAT_EXPR:
4185 case VEC_PACK_FIX_TRUNC_EXPR:
4186 case VEC_PACK_FLOAT_EXPR:
4187 case VEC_WIDEN_LSHIFT_HI_EXPR:
4188 case VEC_WIDEN_LSHIFT_LO_EXPR:
4189 case VEC_DUPLICATE_EXPR:
4190 case VEC_SERIES_EXPR:
4192 return 1;
4194 /* A few special cases of expensive operations. This is useful
4195 to avoid inlining functions having too many of these. */
4196 case TRUNC_DIV_EXPR:
4197 case CEIL_DIV_EXPR:
4198 case FLOOR_DIV_EXPR:
4199 case ROUND_DIV_EXPR:
4200 case EXACT_DIV_EXPR:
4201 case TRUNC_MOD_EXPR:
4202 case CEIL_MOD_EXPR:
4203 case FLOOR_MOD_EXPR:
4204 case ROUND_MOD_EXPR:
4205 case RDIV_EXPR:
4206 if (TREE_CODE (op2) != INTEGER_CST)
4207 return weights->div_mod_cost;
4208 return 1;
4210 /* Bit-field insertion needs several shift and mask operations. */
4211 case BIT_INSERT_EXPR:
4212 return 3;
4214 default:
4215 /* We expect a copy assignment with no operator. */
4216 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4217 return 0;
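/* E.g. 'a / b' with a non-constant divisor is charged
   WEIGHTS->div_mod_cost, while 'a / 16' is charged 1, since division
   by a constant is normally expanded into cheaper multiply/shift code. */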
4222 /* Estimate number of instructions that will be created by expanding
4223 the statements in the statement sequence STMTS.
4224 WEIGHTS contains weights attributed to various constructs. */
4227 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4229 int cost;
4230 gimple_stmt_iterator gsi;
4232 cost = 0;
4233 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4234 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4236 return cost;
4240 /* Estimate number of instructions that will be created by expanding STMT.
4241 WEIGHTS contains weights attributed to various constructs. */
4244 estimate_num_insns (gimple *stmt, eni_weights *weights)
4246 unsigned cost, i;
4247 enum gimple_code code = gimple_code (stmt);
4248 tree lhs;
4249 tree rhs;
4251 switch (code)
4253 case GIMPLE_ASSIGN:
4254 /* Try to estimate the cost of assignments. We have two cases to
4255 deal with:
4256 1) Simple assignments to registers;
4257 2) Stores to things that must live in memory. This includes
4258 "normal" stores to scalars, but also assignments of large
4259 structures, or constructors of big arrays;
4261 Let us look at these two cases, assuming we have "a = b + C":
4262 <GIMPLE_ASSIGN <var_decl "a">
4263 <plus_expr <var_decl "b"> <constant C>>
4264 If "a" is a GIMPLE register, the assignment to it is free on almost
4265 any target, because "a" usually ends up in a real register. Hence
4266 the only cost of this expression comes from the PLUS_EXPR, and we
4267 can ignore the GIMPLE_ASSIGN.
4268 If "a" is not a GIMPLE register, the assignment to "a" will most
4269 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4270 of moving something into "a", which we compute using the function
4271 estimate_move_cost. */
4272 if (gimple_clobber_p (stmt))
4273 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4275 lhs = gimple_assign_lhs (stmt);
4276 rhs = gimple_assign_rhs1 (stmt);
4278 cost = 0;
4280 /* Account for the cost of moving to / from memory. */
4281 if (gimple_store_p (stmt))
4282 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4283 if (gimple_assign_load_p (stmt))
4284 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4286 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4287 gimple_assign_rhs1 (stmt),
4288 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4289 == GIMPLE_BINARY_RHS
4290 ? gimple_assign_rhs2 (stmt) : NULL);
4291 break;
4293 case GIMPLE_COND:
4294 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4295 gimple_op (stmt, 0),
4296 gimple_op (stmt, 1));
4297 break;
4299 case GIMPLE_SWITCH:
4301 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4302 /* Take into account cost of the switch + guess 2 conditional jumps for
4303 each case label.
4305 TODO: once the switch expansion logic is sufficiently separated, we can
4306 do a better job of estimating the cost of the switch. */
4307 if (weights->time_based)
4308 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4309 else
4310 cost = gimple_switch_num_labels (switch_stmt) * 2;
4312 break;
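/* E.g. a switch with 16 labels (including the default) is costed
   16 * 2 == 32 for size, but only floor_log2 (16) * 2 == 8 for time,
   matching a balanced decision tree. */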
4314 case GIMPLE_CALL:
4316 tree decl;
4318 if (gimple_call_internal_p (stmt))
4319 return 0;
4320 else if ((decl = gimple_call_fndecl (stmt))
4321 && fndecl_built_in_p (decl))
4323 /* Do not special-case builtins where we see the body.
4324 This just confuses the inliner. */
4325 struct cgraph_node *node;
4326 if (!(node = cgraph_node::get (decl))
4327 || node->definition)
4329 /* For builtins that are likely expanded to nothing or
4330 inlined, do not account for operand costs. */
4331 else if (is_simple_builtin (decl))
4332 return 0;
4333 else if (is_inexpensive_builtin (decl))
4334 return weights->target_builtin_call_cost;
4335 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4337 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4338 specialize the cheap expansion we do here.
4339 ??? This asks for a more general solution. */
4340 switch (DECL_FUNCTION_CODE (decl))
4342 case BUILT_IN_POW:
4343 case BUILT_IN_POWF:
4344 case BUILT_IN_POWL:
4345 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4346 && (real_equal
4347 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4348 &dconst2)))
4349 return estimate_operator_cost
4350 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4351 gimple_call_arg (stmt, 0));
4352 break;
4354 default:
4355 break;
4360 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4361 if (gimple_call_lhs (stmt))
4362 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4363 weights->time_based);
4364 for (i = 0; i < gimple_call_num_args (stmt); i++)
4366 tree arg = gimple_call_arg (stmt, i);
4367 cost += estimate_move_cost (TREE_TYPE (arg),
4368 weights->time_based);
4370 break;
4373 case GIMPLE_RETURN:
4374 return weights->return_cost;
4376 case GIMPLE_GOTO:
4377 case GIMPLE_LABEL:
4378 case GIMPLE_NOP:
4379 case GIMPLE_PHI:
4380 case GIMPLE_PREDICT:
4381 case GIMPLE_DEBUG:
4382 return 0;
4384 case GIMPLE_ASM:
4386 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4387 /* 1000 means infinity. This avoids overflows later
4388 with very long asm statements. */
4389 if (count > 1000)
4390 count = 1000;
4391 /* If this asm is asm inline, count anything as minimum size. */
4392 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4393 count = MIN (1, count);
4394 return MAX (1, count);
4397 case GIMPLE_RESX:
4398 /* This is either going to be an external function call with one
4399 argument, or two register copy statements plus a goto. */
4400 return 2;
4402 case GIMPLE_EH_DISPATCH:
4403 /* ??? This is going to turn into a switch statement. Ideally
4404 we'd have a look at the eh region and estimate the number of
4405 edges involved. */
4406 return 10;
4408 case GIMPLE_BIND:
4409 return estimate_num_insns_seq (
4410 gimple_bind_body (as_a <gbind *> (stmt)),
4411 weights);
4413 case GIMPLE_EH_FILTER:
4414 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4416 case GIMPLE_CATCH:
4417 return estimate_num_insns_seq (gimple_catch_handler (
4418 as_a <gcatch *> (stmt)),
4419 weights);
4421 case GIMPLE_TRY:
4422 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4423 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4425 /* OMP directives are generally very expensive. */
4427 case GIMPLE_OMP_RETURN:
4428 case GIMPLE_OMP_SECTIONS_SWITCH:
4429 case GIMPLE_OMP_ATOMIC_STORE:
4430 case GIMPLE_OMP_CONTINUE:
4431 /* ...except these, which are cheap. */
4432 return 0;
4434 case GIMPLE_OMP_ATOMIC_LOAD:
4435 return weights->omp_cost;
4437 case GIMPLE_OMP_FOR:
4438 return (weights->omp_cost
4439 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4440 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4442 case GIMPLE_OMP_PARALLEL:
4443 case GIMPLE_OMP_TASK:
4444 case GIMPLE_OMP_CRITICAL:
4445 case GIMPLE_OMP_MASTER:
4446 case GIMPLE_OMP_TASKGROUP:
4447 case GIMPLE_OMP_ORDERED:
4448 case GIMPLE_OMP_SCAN:
4449 case GIMPLE_OMP_SECTION:
4450 case GIMPLE_OMP_SECTIONS:
4451 case GIMPLE_OMP_SINGLE:
4452 case GIMPLE_OMP_TARGET:
4453 case GIMPLE_OMP_TEAMS:
4454 return (weights->omp_cost
4455 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4457 case GIMPLE_TRANSACTION:
4458 return (weights->tm_cost
4459 + estimate_num_insns_seq (gimple_transaction_body (
4460 as_a <gtransaction *> (stmt)),
4461 weights));
4463 default:
4464 gcc_unreachable ();
4467 return cost;
4470 /* Estimate number of instructions that will be created by expanding
4471 function FNDECL. WEIGHTS contains weights attributed to various
4472 constructs. */
4475 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4477 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4478 gimple_stmt_iterator bsi;
4479 basic_block bb;
4480 int n = 0;
4482 gcc_assert (my_function && my_function->cfg);
4483 FOR_EACH_BB_FN (bb, my_function)
4485 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4486 n += estimate_num_insns (gsi_stmt (bsi), weights);
4489 return n;
4493 /* Initializes weights used by estimate_num_insns. */
4495 void
4496 init_inline_once (void)
4498 eni_size_weights.call_cost = 1;
4499 eni_size_weights.indirect_call_cost = 3;
4500 eni_size_weights.target_builtin_call_cost = 1;
4501 eni_size_weights.div_mod_cost = 1;
4502 eni_size_weights.omp_cost = 40;
4503 eni_size_weights.tm_cost = 10;
4504 eni_size_weights.time_based = false;
4505 eni_size_weights.return_cost = 1;
4507 /* Estimating time for call is difficult, since we have no idea what the
4508 called function does. In the current uses of eni_time_weights,
4509 underestimating the cost does less harm than overestimating it, so
4510 we choose a rather small value here. */
4511 eni_time_weights.call_cost = 10;
4512 eni_time_weights.indirect_call_cost = 15;
4513 eni_time_weights.target_builtin_call_cost = 1;
4514 eni_time_weights.div_mod_cost = 10;
4515 eni_time_weights.omp_cost = 40;
4516 eni_time_weights.tm_cost = 40;
4517 eni_time_weights.time_based = true;
4518 eni_time_weights.return_cost = 2;
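/* A sketch of what these weights imply: a direct call 'x = f (a, b)'
   with int operands is estimated by estimate_num_insns at
   10 + 1 + 1 + 1 == 13 time units (call_cost plus one move each for the
   lhs and the two arguments, assuming estimate_move_cost returns 1 for
   int), but only 1 + 3 == 4 size units. */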
4522 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4524 static void
4525 prepend_lexical_block (tree current_block, tree new_block)
4527 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4528 BLOCK_SUBBLOCKS (current_block) = new_block;
4529 BLOCK_SUPERCONTEXT (new_block) = current_block;
4532 /* Add local variables from CALLEE to CALLER. */
4534 static inline void
4535 add_local_variables (struct function *callee, struct function *caller,
4536 copy_body_data *id)
4538 tree var;
4539 unsigned ix;
4541 FOR_EACH_LOCAL_DECL (callee, ix, var)
4542 if (!can_be_nonlocal (var, id))
4544 tree new_var = remap_decl (var, id);
4546 /* Remap debug-expressions. */
4547 if (VAR_P (new_var)
4548 && DECL_HAS_DEBUG_EXPR_P (var)
4549 && new_var != var)
4551 tree tem = DECL_DEBUG_EXPR (var);
4552 bool old_regimplify = id->regimplify;
4553 id->remapping_type_depth++;
4554 walk_tree (&tem, copy_tree_body_r, id, NULL);
4555 id->remapping_type_depth--;
4556 id->regimplify = old_regimplify;
4557 SET_DECL_DEBUG_EXPR (new_var, tem);
4558 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4560 add_local_decl (caller, new_var);
4564 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4565 have brought in or introduced any debug stmts for SRCVAR. */
4567 static inline void
4568 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4570 tree *remappedvarp = id->decl_map->get (srcvar);
4572 if (!remappedvarp)
4573 return;
4575 if (!VAR_P (*remappedvarp))
4576 return;
4578 if (*remappedvarp == id->retvar)
4579 return;
4581 tree tvar = target_for_debug_bind (*remappedvarp);
4582 if (!tvar)
4583 return;
4585 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4586 id->call_stmt);
4587 gimple_seq_add_stmt (bindings, stmt);
4590 /* For each inlined variable for which we may have debug bind stmts,
4591 add before GSI a final debug stmt resetting it, marking the end of
4592 its life, so that var-tracking knows it doesn't have to compute
4593 further locations for it. */
4595 static inline void
4596 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4598 tree var;
4599 unsigned ix;
4600 gimple_seq bindings = NULL;
4602 if (!gimple_in_ssa_p (id->src_cfun))
4603 return;
4605 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4606 return;
4608 for (var = DECL_ARGUMENTS (id->src_fn);
4609 var; var = DECL_CHAIN (var))
4610 reset_debug_binding (id, var, &bindings);
4612 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4613 reset_debug_binding (id, var, &bindings);
4615 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
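/* The bindings built above show up as '# DEBUG var => NULL' in GIMPLE
   dumps (a reset, since the value operand is NULL_TREE), marking the end
   of each inlined variable's range for var-tracking. */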
4618 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4620 static bool
4621 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4623 tree use_retvar;
4624 tree fn;
4625 hash_map<tree, tree> *dst;
4626 hash_map<tree, tree> *st = NULL;
4627 tree return_slot;
4628 tree modify_dest;
4629 struct cgraph_edge *cg_edge;
4630 cgraph_inline_failed_t reason;
4631 basic_block return_block;
4632 edge e;
4633 gimple_stmt_iterator gsi, stmt_gsi;
4634 bool successfully_inlined = false;
4635 bool purge_dead_abnormal_edges;
4636 gcall *call_stmt;
4637 unsigned int prop_mask, src_properties;
4638 struct function *dst_cfun;
4639 tree simduid;
4640 use_operand_p use;
4641 gimple *simtenter_stmt = NULL;
4642 vec<tree> *simtvars_save;
4644 /* The gimplifier uses input_location in too many places, such as
4645 internal_get_tmp_var (). */
4646 location_t saved_location = input_location;
4647 input_location = gimple_location (stmt);
4649 /* From here on, we're only interested in CALL_EXPRs. */
4650 call_stmt = dyn_cast <gcall *> (stmt);
4651 if (!call_stmt)
4652 goto egress;
4654 cg_edge = id->dst_node->get_edge (stmt);
4655 gcc_checking_assert (cg_edge);
4656 /* First, see if we can figure out what function is being called.
4657 If we cannot, then there is no hope of inlining the function. */
4658 if (cg_edge->indirect_unknown_callee)
4659 goto egress;
4660 fn = cg_edge->callee->decl;
4661 gcc_checking_assert (fn);
4663 /* If FN is a declaration of a function in a nested scope that was
4664 globally declared inline, we don't set its DECL_INITIAL.
4665 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4666 C++ front-end uses it for cdtors to refer to their internal
4667 declarations, that are not real functions. Fortunately those
4668 don't have trees to be saved, so we can tell by checking their
4669 gimple_body. */
4670 if (!DECL_INITIAL (fn)
4671 && DECL_ABSTRACT_ORIGIN (fn)
4672 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4673 fn = DECL_ABSTRACT_ORIGIN (fn);
4675 /* Don't try to inline functions that are not well-suited to inlining. */
4676 if (cg_edge->inline_failed)
4678 reason = cg_edge->inline_failed;
4679 /* If this call was originally indirect, we do not want to emit any
4680 inlining related warnings or sorry messages because there are no
4681 guarantees regarding those. */
4682 if (cg_edge->indirect_inlining_edge)
4683 goto egress;
4685 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4686 /* For extern inline functions that get redefined we have always
4687 silently ignored the always_inline flag. Better behavior would
4688 be to be able to keep both bodies and use the extern inline body
4689 for inlining, but we can't do that because frontends overwrite
4690 the body. */
4691 && !cg_edge->callee->local.redefined_extern_inline
4692 /* During early inline pass, report only when optimization is
4693 not turned on. */
4694 && (symtab->global_info_ready
4695 || !optimize
4696 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4697 /* PR 20090218-1_0.c. Body can be provided by another module. */
4698 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4700 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4701 cgraph_inline_failed_string (reason));
4702 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4703 inform (gimple_location (stmt), "called from here");
4704 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4705 inform (DECL_SOURCE_LOCATION (cfun->decl),
4706 "called from this function");
4708 else if (warn_inline
4709 && DECL_DECLARED_INLINE_P (fn)
4710 && !DECL_NO_INLINE_WARNING_P (fn)
4711 && !DECL_IN_SYSTEM_HEADER (fn)
4712 && reason != CIF_UNSPECIFIED
4713 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4714 /* Do not warn about not inlined recursive calls. */
4715 && !cg_edge->recursive_p ()
4716 /* Avoid warnings during early inline pass. */
4717 && symtab->global_info_ready)
4719 auto_diagnostic_group d;
4720 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4721 fn, _(cgraph_inline_failed_string (reason))))
4723 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4724 inform (gimple_location (stmt), "called from here");
4725 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4726 inform (DECL_SOURCE_LOCATION (cfun->decl),
4727 "called from this function");
4730 goto egress;
4732 id->src_node = cg_edge->callee;
4734 /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4735 and redirect to the function being thunked. */
4736 if (id->src_node->thunk.thunk_p)
4738 cgraph_edge *edge;
4739 tree virtual_offset = NULL;
4740 profile_count count = cg_edge->count;
4741 tree op;
4742 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4744 cg_edge->remove ();
4745 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4746 gimple_uid (stmt),
4747 profile_count::one (),
4748 profile_count::one (),
4749 true);
4750 edge->count = count;
4751 if (id->src_node->thunk.virtual_offset_p)
4752 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4753 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4754 NULL);
4755 gsi_insert_before (&iter, gimple_build_assign (op,
4756 gimple_call_arg (stmt, 0)),
4757 GSI_NEW_STMT);
4758 gcc_assert (id->src_node->thunk.this_adjusting);
4759 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4760 virtual_offset, id->src_node->thunk.indirect_offset);
4762 gimple_call_set_arg (stmt, 0, op);
4763 gimple_call_set_fndecl (stmt, edge->callee->decl);
4764 update_stmt (stmt);
4765 id->src_node->remove ();
4766 expand_call_inline (bb, stmt, id);
4767 maybe_remove_unused_call_args (cfun, stmt);
4768 return true;
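/* E.g. for a this-adjusting thunk with a fixed_offset of 8, the code
   above materializes 'op = this + 8' (plus a vtable lookup when
   virtual_offset is set) before the call, redirects the call to the
   thunked-to function, and then retries inlining on the adjusted call.
   The offsets themselves come from the C++ object layout. */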
4770 fn = cg_edge->callee->decl;
4771 cg_edge->callee->get_untransformed_body ();
4773 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4774 cg_edge->callee->verify ();
4776 /* We will be inlining this callee. */
4777 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4779 /* Update the caller's EH personality. */
4780 if (DECL_FUNCTION_PERSONALITY (fn))
4781 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4782 = DECL_FUNCTION_PERSONALITY (fn);
4784 /* Split the block before the GIMPLE_CALL. */
4785 stmt_gsi = gsi_for_stmt (stmt);
4786 gsi_prev (&stmt_gsi);
4787 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4788 bb = e->src;
4789 return_block = e->dest;
4790 remove_edge (e);
4792 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4793 been the source of abnormal edges. In this case, schedule
4794 the removal of dead abnormal edges. */
4795 gsi = gsi_start_bb (return_block);
4796 gsi_next (&gsi);
4797 purge_dead_abnormal_edges = gsi_end_p (gsi);
4799 stmt_gsi = gsi_start_bb (return_block);
4801 /* Build a block containing code to initialize the arguments, the
4802 actual inline expansion of the body, and a label for the return
4803 statements within the function to jump to. The type of the
4804 statement expression is the return type of the function call.
4805 ??? If the call does not have an associated block then we will
4806 remap all callee blocks to NULL, effectively dropping most of
4807 its debug information. This should only happen for calls to
4808 artificial decls inserted by the compiler itself. We need to
4809 either link the inlined blocks into the caller block tree or
4810 not refer to them in any way to not break GC for locations. */
4811 if (tree block = gimple_block (stmt))
4813 /* We do want to assign a non-UNKNOWN_LOCATION BLOCK_SOURCE_LOCATION
4814 to make inlined_function_outer_scope_p return true on this BLOCK. */
4815 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4816 if (loc == UNKNOWN_LOCATION)
4817 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4818 if (loc == UNKNOWN_LOCATION)
4819 loc = BUILTINS_LOCATION;
4820 id->block = make_node (BLOCK);
4821 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4822 BLOCK_SOURCE_LOCATION (id->block) = loc;
4823 prepend_lexical_block (block, id->block);
4826 /* Local declarations will be replaced by their equivalents in this map. */
4827 st = id->decl_map;
4828 id->decl_map = new hash_map<tree, tree>;
4829 dst = id->debug_map;
4830 id->debug_map = NULL;
4831 if (flag_stack_reuse != SR_NONE)
4832 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4834 /* Record the function we are about to inline. */
4835 id->src_fn = fn;
4836 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4837 id->reset_location = DECL_IGNORED_P (fn);
4838 id->call_stmt = call_stmt;
4840 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4841 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4842 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4843 simtvars_save = id->dst_simt_vars;
4844 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4845 && (simduid = bb->loop_father->simduid) != NULL_TREE
4846 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4847 && single_imm_use (simduid, &use, &simtenter_stmt)
4848 && is_gimple_call (simtenter_stmt)
4849 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4850 vec_alloc (id->dst_simt_vars, 0);
4851 else
4852 id->dst_simt_vars = NULL;
4854 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4855 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4857 /* If the src function contains an IFN_VA_ARG, then so will the dst
4858 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4859 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4860 src_properties = id->src_cfun->curr_properties & prop_mask;
4861 if (src_properties != prop_mask)
4862 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4863 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4865 gcc_assert (!id->src_cfun->after_inlining);
4867 id->entry_bb = bb;
4868 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4870 gimple_stmt_iterator si = gsi_last_bb (bb);
4871 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4872 NOT_TAKEN),
4873 GSI_NEW_STMT);
4875 initialize_inlined_parameters (id, stmt, fn, bb);
4876 if (debug_nonbind_markers_p && debug_inline_points && id->block
4877 && inlined_function_outer_scope_p (id->block))
4879 gimple_stmt_iterator si = gsi_last_bb (bb);
4880 gsi_insert_after (&si, gimple_build_debug_inline_entry
4881 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4882 GSI_NEW_STMT);
4885 if (DECL_INITIAL (fn))
4887 if (gimple_block (stmt))
4889 tree *var;
4891 prepend_lexical_block (id->block,
4892 remap_blocks (DECL_INITIAL (fn), id));
4893 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4894 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4895 == NULL_TREE));
4896 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4897 otherwise in DWARF the DW_TAG_formal_parameter entries will not be
4898 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4899 under it. The parameters can then be evaluated in the debugger,
4900 but don't show in backtraces. */
4901 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4902 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4904 tree v = *var;
4905 *var = TREE_CHAIN (v);
4906 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4907 BLOCK_VARS (id->block) = v;
4909 else
4910 var = &TREE_CHAIN (*var);
4912 else
4913 remap_blocks_to_null (DECL_INITIAL (fn), id);
4916 /* Return statements in the function body will be replaced by jumps
4917 to the RET_LABEL. */
4918 gcc_assert (DECL_INITIAL (fn));
4919 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4921 /* Find the LHS to which the result of this call is assigned. */
4922 return_slot = NULL;
4923 if (gimple_call_lhs (stmt))
4925 modify_dest = gimple_call_lhs (stmt);
4927 /* The function which we are inlining might not return a value,
4928 in which case we should issue a warning that the function
4929 does not return a value. In that case the optimizers will
4930 see that the variable to which the value is assigned was not
4931 initialized. We do not want to issue a warning about that
4932 uninitialized variable. */
4933 if (DECL_P (modify_dest))
4934 TREE_NO_WARNING (modify_dest) = 1;
4936 if (gimple_call_return_slot_opt_p (call_stmt))
4938 return_slot = modify_dest;
4939 modify_dest = NULL;
4942 else
4943 modify_dest = NULL;
4945 /* If we are inlining a call to the C++ operator new, we don't want
4946 to use type based alias analysis on the return value. Otherwise
4947 we may get confused if the compiler sees that the inlined new
4948 function returns a pointer which was just deleted. See bug
4949 33407. */
4950 if (DECL_IS_OPERATOR_NEW_P (fn))
4952 return_slot = NULL;
4953 modify_dest = NULL;
4956 /* Declare the return variable for the function. */
4957 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4959 /* Add local vars in this inlined callee to caller. */
4960 add_local_variables (id->src_cfun, cfun, id);
4962 if (id->src_node->clone.performed_splits)
4964 /* Any calls from the inlined function will be turned into calls from the
4965 function we inline into. We must preserve the notes about how to split
4966 parameters so that such calls can be redirected/updated. */
4967 unsigned len = vec_safe_length (id->src_node->clone.performed_splits);
4968 for (unsigned i = 0; i < len; i++)
4970 ipa_param_performed_split ps
4971 = (*id->src_node->clone.performed_splits)[i];
4972 ps.dummy_decl = remap_decl (ps.dummy_decl, id);
4973 vec_safe_push (id->dst_node->clone.performed_splits, ps);
4976 if (flag_checking)
4978 len = vec_safe_length (id->dst_node->clone.performed_splits);
4979 for (unsigned i = 0; i < len; i++)
4981 ipa_param_performed_split *ps1
4982 = &(*id->dst_node->clone.performed_splits)[i];
4983 for (unsigned j = i + 1; j < len; j++)
4985 ipa_param_performed_split *ps2
4986 = &(*id->dst_node->clone.performed_splits)[j];
4987 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
4988 || ps1->unit_offset != ps2->unit_offset);
4994 if (dump_enabled_p ())
4996 char buf[128];
4997 snprintf (buf, sizeof(buf), "%4.2f",
4998 cg_edge->sreal_frequency ().to_double ());
4999 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5000 call_stmt,
5001 "Inlining %C to %C with frequency %s\n",
5002 id->src_node, id->dst_node, buf);
5003 if (dump_file && (dump_flags & TDF_DETAILS))
5005 id->src_node->dump (dump_file);
5006 id->dst_node->dump (dump_file);
5010 /* This is it. Duplicate the callee body. Assume callee is
5011 pre-gimplified. Note that we must not alter the caller
5012 function in any way before this point, as this CALL_EXPR may be
5013 a self-referential call; if we're calling ourselves, we need to
5014 duplicate our body before altering anything. */
5015 copy_body (id, bb, return_block, NULL);
5017 reset_debug_bindings (id, stmt_gsi);
5019 if (flag_stack_reuse != SR_NONE)
5020 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5021 if (!TREE_THIS_VOLATILE (p))
5023 tree *varp = id->decl_map->get (p);
5024 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
5026 tree clobber = build_clobber (TREE_TYPE (*varp));
5027 gimple *clobber_stmt;
5028 clobber_stmt = gimple_build_assign (*varp, clobber);
5029 gimple_set_location (clobber_stmt, gimple_location (stmt));
5030 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5034 /* Reset the escaped solution. */
5035 if (cfun->gimple_df)
5036 pt_solution_reset (&cfun->gimple_df->escaped);
5038 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
5039 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5041 size_t nargs = gimple_call_num_args (simtenter_stmt);
5042 vec<tree> *vars = id->dst_simt_vars;
5043 auto_vec<tree> newargs (nargs + vars->length ());
5044 for (size_t i = 0; i < nargs; i++)
5045 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5046 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5048 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5049 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5051 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5052 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5053 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5054 gsi_replace (&gsi, g, false);
5056 vec_free (id->dst_simt_vars);
5057 id->dst_simt_vars = simtvars_save;
5059 /* Clean up. */
5060 if (id->debug_map)
5062 delete id->debug_map;
5063 id->debug_map = dst;
5065 delete id->decl_map;
5066 id->decl_map = st;
5068 /* Unlink the call's virtual operands before replacing it. */
5069 unlink_stmt_vdef (stmt);
5070 if (gimple_vdef (stmt)
5071 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5072 release_ssa_name (gimple_vdef (stmt));
5074 /* If the inlined function returns a result that we care about,
5075 substitute the GIMPLE_CALL with an assignment of the return
5076 variable to the LHS of the call. That is, if STMT was
5077 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
5078 if (use_retvar && gimple_call_lhs (stmt))
5080 gimple *old_stmt = stmt;
5081 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5082 gimple_set_location (stmt, gimple_location (old_stmt));
5083 gsi_replace (&stmt_gsi, stmt, false);
5084 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5085 /* Append a clobber for id->retvar if easily possible. */
5086 if (flag_stack_reuse != SR_NONE
5087 && id->retvar
5088 && VAR_P (id->retvar)
5089 && id->retvar != return_slot
5090 && id->retvar != modify_dest
5091 && !TREE_THIS_VOLATILE (id->retvar)
5092 && !is_gimple_reg (id->retvar)
5093 && !stmt_ends_bb_p (stmt))
5095 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5096 gimple *clobber_stmt;
5097 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5098 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5099 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5102 else
5104 /* Handle the case of inlining a function with no return
5105 statement, which causes the return value to become undefined. */
5106 if (gimple_call_lhs (stmt)
5107 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5109 tree name = gimple_call_lhs (stmt);
5110 tree var = SSA_NAME_VAR (name);
5111 tree def = var ? ssa_default_def (cfun, var) : NULL;
5113 if (def)
5115 /* If the variable is used undefined, make this name
5116 undefined via a move. */
5117 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5118 gsi_replace (&stmt_gsi, stmt, true);
5120 else
5122 if (!var)
5124 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5125 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5127 /* Otherwise make this variable undefined. */
5128 gsi_remove (&stmt_gsi, true);
5129 set_ssa_default_def (cfun, var, name);
5130 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5133 /* Replace with a clobber for id->retvar. */
5134 else if (flag_stack_reuse != SR_NONE
5135 && id->retvar
5136 && VAR_P (id->retvar)
5137 && id->retvar != return_slot
5138 && id->retvar != modify_dest
5139 && !TREE_THIS_VOLATILE (id->retvar)
5140 && !is_gimple_reg (id->retvar))
5142 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5143 gimple *clobber_stmt;
5144 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5145 gimple_set_location (clobber_stmt, gimple_location (stmt));
5146 gsi_replace (&stmt_gsi, clobber_stmt, false);
5147 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5149 else
5150 gsi_remove (&stmt_gsi, true);
5153 if (purge_dead_abnormal_edges)
5155 gimple_purge_dead_eh_edges (return_block);
5156 gimple_purge_dead_abnormal_call_edges (return_block);
5159 /* If the value of the new expression is ignored, that's OK. We
5160 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5161 the equivalent inlined version either. */
5162 if (is_gimple_assign (stmt))
5164 gcc_assert (gimple_assign_single_p (stmt)
5165 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5166 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5169 id->add_clobbers_to_eh_landing_pads = 0;
5171 /* Output the inlining info for this abstract function, since it has been
5172 inlined. If we don't do this now, we can lose the information about the
5173 variables in the function when the blocks get blown away as soon as we
5174 remove the cgraph node. */
5175 if (gimple_block (stmt))
5176 (*debug_hooks->outlining_inline_function) (fn);
5178 /* Update callgraph if needed. */
5179 cg_edge->callee->remove ();
5181 id->block = NULL_TREE;
5182 id->retvar = NULL_TREE;
5183 successfully_inlined = true;
5185 egress:
5186 input_location = saved_location;
5187 return successfully_inlined;
5190 /* Expand call statements reachable from STMT_P.
5191 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5192 in a MODIFY_EXPR. */
5194 static bool
5195 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
5197 gimple_stmt_iterator gsi;
5198 bool inlined = false;
5200 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5202 gimple *stmt = gsi_stmt (gsi);
5203 gsi_prev (&gsi);
5205 if (is_gimple_call (stmt)
5206 && !gimple_call_internal_p (stmt))
5207 inlined |= expand_call_inline (bb, stmt, id);
5210 return inlined;
5214 /* Walk all basic blocks created after FIRST and try to fold every statement
5215 in the STATEMENTS pointer set. */
5217 static void
5218 fold_marked_statements (int first, hash_set<gimple *> *statements)
5220 for (; first < last_basic_block_for_fn (cfun); first++)
5221 if (BASIC_BLOCK_FOR_FN (cfun, first))
5223 gimple_stmt_iterator gsi;
5225 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5226 !gsi_end_p (gsi);
5227 gsi_next (&gsi))
5228 if (statements->contains (gsi_stmt (gsi)))
5230 gimple *old_stmt = gsi_stmt (gsi);
5231 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
5233 if (old_decl && fndecl_built_in_p (old_decl))
5235 /* Folding builtins can create multiple instructions,
5236 we need to look at all of them. */
5237 gimple_stmt_iterator i2 = gsi;
5238 gsi_prev (&i2);
5239 if (fold_stmt (&gsi))
5241 gimple *new_stmt;
5242 /* If a builtin at the end of a bb folded into nothing,
5243 the following loop won't work. */
5244 if (gsi_end_p (gsi))
5246 cgraph_update_edges_for_call_stmt (old_stmt,
5247 old_decl, NULL);
5248 break;
5250 if (gsi_end_p (i2))
5251 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5252 else
5253 gsi_next (&i2);
5254 while (1)
5256 new_stmt = gsi_stmt (i2);
5257 update_stmt (new_stmt);
5258 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5259 new_stmt);
5261 if (new_stmt == gsi_stmt (gsi))
5263 /* It is okay to check only for the very last
5264 of these statements. If it is a throwing
5265 statement nothing will change. If it isn't,
5266 this can remove EH edges. That would only be
5267 incorrect if some intermediate stmts could
5268 throw, but not the last one. That would mean
5269 we'd have to split the block, which we can't
5270 do here and we'd lose anyway. And as builtins
5271 probably never throw, this all
5272 is moot anyway. */
5273 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5274 new_stmt))
5275 gimple_purge_dead_eh_edges (
5276 BASIC_BLOCK_FOR_FN (cfun, first));
5277 break;
5279 gsi_next (&i2);
5283 else if (fold_stmt (&gsi))
5285 /* Re-read the statement from GSI as fold_stmt() may
5286 have changed it. */
5287 gimple *new_stmt = gsi_stmt (gsi);
5288 update_stmt (new_stmt);
5290 if (is_gimple_call (old_stmt)
5291 || is_gimple_call (new_stmt))
5292 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5293 new_stmt);
5295 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5296 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
5297 first));
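/* E.g. when inlining has substituted a constant argument, a marked call
   such as 'strlen ("abc")' now folds to '3'; the builtin branch above
   then walks every statement the folding produced so the cgraph edges
   for the vanished call can be updated or removed. */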
5303 /* Expand calls to inline functions in the body of FN. */
5305 unsigned int
5306 optimize_inline_calls (tree fn)
5308 copy_body_data id;
5309 basic_block bb;
5310 int last = n_basic_blocks_for_fn (cfun);
5311 bool inlined_p = false;
5313 /* Clear out ID. */
5314 memset (&id, 0, sizeof (id));
5316 id.src_node = id.dst_node = cgraph_node::get (fn);
5317 gcc_assert (id.dst_node->definition);
5318 id.dst_fn = fn;
5319 /* Or any functions that aren't finished yet. */
5320 if (current_function_decl)
5321 id.dst_fn = current_function_decl;
5323 id.copy_decl = copy_decl_maybe_to_var;
5324 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5325 id.transform_new_cfg = false;
5326 id.transform_return_to_modify = true;
5327 id.transform_parameter = true;
5328 id.transform_lang_insert_block = NULL;
5329 id.statements_to_fold = new hash_set<gimple *>;
5331 push_gimplify_context ();
5333 /* We make no attempts to keep dominance info up-to-date. */
5334 free_dominance_info (CDI_DOMINATORS);
5335 free_dominance_info (CDI_POST_DOMINATORS);
5337 /* Register specific gimple functions. */
5338 gimple_register_cfg_hooks ();
5340 /* Reach the trees by walking over the CFG, and note the
5341 enclosing basic-blocks in the call edges. */
5342 /* We walk the blocks going forward, because inlined function bodies
5343 will split id->current_basic_block, and the new blocks will
5344 follow it; we'll trudge through them, processing their CALL_EXPRs
5345 along the way. */
5346 FOR_EACH_BB_FN (bb, cfun)
5347 inlined_p |= gimple_expand_calls_inline (bb, &id);
5349 pop_gimplify_context (NULL);
5351 if (flag_checking)
5353 struct cgraph_edge *e;
5355 id.dst_node->verify ();
5357 /* Double check that we inlined everything we are supposed to inline. */
5358 for (e = id.dst_node->callees; e; e = e->next_callee)
5359 gcc_assert (e->inline_failed);
5362 /* Fold queued statements. */
5363 update_max_bb_count ();
5364 fold_marked_statements (last, id.statements_to_fold);
5365 delete id.statements_to_fold;
5367 gcc_assert (!id.debug_stmts.exists ());
5369 /* If we didn't inline into the function there is nothing to do. */
5370 if (!inlined_p)
5371 return 0;
5373 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5374 number_blocks (fn);
5376 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5378 if (flag_checking)
5379 id.dst_node->verify ();
5381 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5382 not possible yet - the IPA passes might make various functions not
5383 throw and they don't care to proactively update local EH info. This is
5384 done later in the fixup_cfg pass that also executes the verification. */
5385 return (TODO_update_ssa
5386 | TODO_cleanup_cfg
5387 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5388 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5389 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5390 ? TODO_rebuild_frequencies : 0));
5393 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5395 tree
5396 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5398 enum tree_code code = TREE_CODE (*tp);
5399 enum tree_code_class cl = TREE_CODE_CLASS (code);
5401 /* We make copies of most nodes. */
5402 if (IS_EXPR_CODE_CLASS (cl)
5403 || code == TREE_LIST
5404 || code == TREE_VEC
5405 || code == TYPE_DECL
5406 || code == OMP_CLAUSE)
5408 /* Because the chain gets clobbered when we make a copy, we save it
5409 here. */
5410 tree chain = NULL_TREE, new_tree;
5412 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5413 chain = TREE_CHAIN (*tp);
5415 /* Copy the node. */
5416 new_tree = copy_node (*tp);
5418 *tp = new_tree;
5420 /* Now, restore the chain, if appropriate. That will cause
5421 walk_tree to walk into the chain as well. */
5422 if (code == PARM_DECL
5423 || code == TREE_LIST
5424 || code == OMP_CLAUSE)
5425 TREE_CHAIN (*tp) = chain;
5427 /* For now, we don't update BLOCKs when we make copies. So, we
5428 have to nullify all BIND_EXPRs. */
5429 if (TREE_CODE (*tp) == BIND_EXPR)
5430 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5432 else if (code == CONSTRUCTOR)
5434 /* CONSTRUCTOR nodes need special handling because
5435 we need to duplicate the vector of elements. */
5436 tree new_tree;
5438 new_tree = copy_node (*tp);
5439 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5440 *tp = new_tree;
5442 else if (code == STATEMENT_LIST)
5443 /* We used to just abort on STATEMENT_LIST, but we can run into them
5444 with statement-expressions (c++/40975). */
5445 copy_statement_list (tp);
5446 else if (TREE_CODE_CLASS (code) == tcc_type)
5447 *walk_subtrees = 0;
5448 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5449 *walk_subtrees = 0;
5450 else if (TREE_CODE_CLASS (code) == tcc_constant)
5451 *walk_subtrees = 0;
5452 return NULL_TREE;
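/* Typical use (a sketch): 'walk_tree (&t, copy_tree_r, NULL, NULL)'
   unshares the expression tree rooted at T, copying expression nodes
   while deliberately sharing types, decls and constants, for which
   *WALK_SUBTREES is cleared above. */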
5455 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5456 information indicating to what new SAVE_EXPR this one should be mapped,
5457 use that one. Otherwise, create a new node and enter it in ST. FN is
5458 the function into which the copy will be placed. */
5460 static void
5461 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5463 tree *n;
5464 tree t;
5466 /* See if we already encountered this SAVE_EXPR. */
5467 n = st->get (*tp);
5469 /* If we didn't already remap this SAVE_EXPR, do so now. */
5470 if (!n)
5472 t = copy_node (*tp);
5474 /* Remember this SAVE_EXPR. */
5475 st->put (*tp, t);
5476 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5477 st->put (t, t);
5479 else
5481 /* We've already walked into this SAVE_EXPR; don't do it again. */
5482 *walk_subtrees = 0;
5483 t = *n;
5486 /* Replace this SAVE_EXPR with the copy. */
5487 *tp = t;
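/* Illustrative trace, not from the sources: if the walker meets the same
   SAVE_EXPR S twice, the first visit adds both S -> S' and S' -> S' to ST
   and rewrites *TP to S'; the second visit finds a mapping immediately and
   skips the subtree.  Every reference to S therefore shares the single
   copy S', preserving the evaluate-once semantics of SAVE_EXPR.  */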
5490 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5491 label, copies the declaration and enters it in the decl map in DATA
5492 (which is really a 'copy_body_data *'). */
5494 static tree
5495 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5496 bool *handled_ops_p ATTRIBUTE_UNUSED,
5497 struct walk_stmt_info *wi)
5499 copy_body_data *id = (copy_body_data *) wi->info;
5500 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5502 if (stmt)
5504 tree decl = gimple_label_label (stmt);
5506 /* Copy the decl and remember the copy. */
5507 insert_decl_map (id, decl, id->copy_decl (decl, id));
5510 return NULL_TREE;
5513 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5514 struct walk_stmt_info *wi);
5516 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5517 Using the hash_map pointed to by ST, remaps all local declarations
5518 to appropriate replacements in gimple operands. */
5521 static tree
5522 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5524 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5525 copy_body_data *id = (copy_body_data *) wi->info;
5526 hash_map<tree, tree> *st = id->decl_map;
5527 tree *n;
5528 tree expr = *tp;
5530 /* For recursive invocations this is no longer the LHS itself. */
5531 bool is_lhs = wi->is_lhs;
5532 wi->is_lhs = false;
5534 if (TREE_CODE (expr) == SSA_NAME)
5536 *tp = remap_ssa_name (*tp, id);
5537 *walk_subtrees = 0;
5538 if (is_lhs)
5539 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5541 /* Only a local declaration (variable or label). */
5542 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5543 || TREE_CODE (expr) == LABEL_DECL)
5545 /* Lookup the declaration. */
5546 n = st->get (expr);
5548 /* If it's there, remap it. */
5549 if (n)
5550 *tp = *n;
5551 *walk_subtrees = 0;
5553 else if (TREE_CODE (expr) == STATEMENT_LIST
5554 || TREE_CODE (expr) == BIND_EXPR
5555 || TREE_CODE (expr) == SAVE_EXPR)
5556 gcc_unreachable ();
5557 else if (TREE_CODE (expr) == TARGET_EXPR)
5559 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5560 It's OK for this to happen if it was part of a subtree that
5561 isn't immediately expanded, such as operand 2 of another
5562 TARGET_EXPR. */
5563 if (!TREE_OPERAND (expr, 1))
5565 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5566 TREE_OPERAND (expr, 3) = NULL_TREE;
5569 else if (TREE_CODE (expr) == OMP_CLAUSE)
5571 /* Before the omplower pass completes, some OMP clauses can contain
5572 sequences that are neither copied by gimple_seq_copy nor walked by
5573 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5574 in those situations, we have to copy and process them explicitly. */
5576 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5578 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5579 seq = duplicate_remap_omp_clause_seq (seq, wi);
5580 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5582 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5584 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5585 seq = duplicate_remap_omp_clause_seq (seq, wi);
5586 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5588 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5590 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5591 seq = duplicate_remap_omp_clause_seq (seq, wi);
5592 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5593 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5594 seq = duplicate_remap_omp_clause_seq (seq, wi);
5595 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5599 /* Keep iterating. */
5600 return NULL_TREE;
5604 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5605 Using the decl map in the copy_body_data pointed to by WI->info,
5606 remaps all local declarations to appropriate replacements in gimple
5607 statements. */
5609 static tree
5610 replace_locals_stmt (gimple_stmt_iterator *gsip,
5611 bool *handled_ops_p ATTRIBUTE_UNUSED,
5612 struct walk_stmt_info *wi)
5614 copy_body_data *id = (copy_body_data *) wi->info;
5615 gimple *gs = gsi_stmt (*gsip);
5617 if (gbind *stmt = dyn_cast <gbind *> (gs))
5619 tree block = gimple_bind_block (stmt);
5621 if (block)
5623 remap_block (&block, id);
5624 gimple_bind_set_block (stmt, block);
5627 /* This will remap a lot of the same decls again, but this should be
5628 harmless. */
5629 if (gimple_bind_vars (stmt))
5631 tree old_var, decls = gimple_bind_vars (stmt);
5633 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5634 if (!can_be_nonlocal (old_var, id)
5635 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5636 remap_decl (old_var, id);
5638 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5639 id->prevent_decl_creation_for_types = true;
5640 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5641 id->prevent_decl_creation_for_types = false;
5645 /* Keep iterating. */
5646 return NULL_TREE;
5649 /* Create a copy of SEQ and remap all decls in it. */
5651 static gimple_seq
5652 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5654 if (!seq)
5655 return NULL;
5657 /* If there are any labels in OMP sequences, they can only be referred to
5658 within the sequence itself, so both label marking and remapping can be done here. */
5659 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5660 gimple_seq copy = gimple_seq_copy (seq);
5661 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5662 return copy;
5665 /* Copies everything in SEQ and replaces variables and labels local to
5666 current_function_decl. */
5668 gimple_seq
5669 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5671 copy_body_data id;
5672 struct walk_stmt_info wi;
5673 gimple_seq copy;
5675 /* There's nothing to do for an empty sequence. */
5676 if (seq == NULL)
5677 return seq;
5679 /* Set up ID. */
5680 memset (&id, 0, sizeof (id));
5681 id.src_fn = current_function_decl;
5682 id.dst_fn = current_function_decl;
5683 id.src_cfun = cfun;
5684 id.decl_map = new hash_map<tree, tree>;
5685 id.debug_map = NULL;
5687 id.copy_decl = copy_decl_no_change;
5688 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5689 id.transform_new_cfg = false;
5690 id.transform_return_to_modify = false;
5691 id.transform_parameter = false;
5692 id.transform_lang_insert_block = NULL;
5694 /* Walk the tree once to find local labels. */
5695 memset (&wi, 0, sizeof (wi));
5696 hash_set<tree> visited;
5697 wi.info = &id;
5698 wi.pset = &visited;
5699 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5701 copy = gimple_seq_copy (seq);
5703 /* Walk the copy, remapping decls. */
5704 memset (&wi, 0, sizeof (wi));
5705 wi.info = &id;
5706 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5708 /* Clean up. */
5709 delete id.decl_map;
5710 if (id.debug_map)
5711 delete id.debug_map;
5712 if (id.dependence_map)
5714 delete id.dependence_map;
5715 id.dependence_map = NULL;
5718 return copy;
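/* Illustrative sketch with a hypothetical caller: to get an independently
   editable copy of, say, a GIMPLE_BIND body, one would write

     gbind *bind = ...;
     gimple_seq copy
       = copy_gimple_seq_and_replace_locals (gimple_bind_body (bind));

   All block-local variables and labels in COPY are fresh decls, so edits
   to the copy cannot leak into the original sequence.  */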
5722 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5724 static tree
5725 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5727 if (*tp == data)
5728 return (tree) data;
5729 else
5730 return NULL;
5733 DEBUG_FUNCTION bool
5734 debug_find_tree (tree top, tree search)
5736 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
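/* Example of the intended use from a debugger session, with TOP and SEARCH
   bound to tree nodes in the inferior:

     (gdb) call debug_find_tree (top, search)

   The call returns true iff SEARCH occurs somewhere inside TOP.  */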
5740 /* Declare the variables created by the inliner. Add all the variables in
5741 VARS to BLOCK. */
5743 static void
5744 declare_inline_vars (tree block, tree vars)
5746 tree t;
5747 for (t = vars; t; t = DECL_CHAIN (t))
5749 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5750 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5751 add_local_decl (cfun, t);
5754 if (block)
5755 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5758 /* Finish the copy COPY of DECL. DECL originally lived in ID->src_fn and
5759 COPY will be part of ID->dst_fn; fix up debug info, RTL, TREE_USED and
5760 DECL_CONTEXT accordingly. */
5762 tree
5763 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5765 /* Don't generate debug information for the copy if we wouldn't have
5766 generated it for the original either. */
5767 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5768 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5770 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5771 declaration inspired this copy. */
5772 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5774 /* The new variable/label has no RTL, yet. */
5775 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5776 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5777 SET_DECL_RTL (copy, 0);
5778 /* For vector typed decls make sure to update DECL_MODE according
5779 to the new function context. */
5780 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5781 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5783 /* These args would always appear unused, if not for this. */
5784 TREE_USED (copy) = 1;
5786 /* Set the context for the new declaration. */
5787 if (!DECL_CONTEXT (decl))
5788 /* Globals stay global. */
5790 else if (DECL_CONTEXT (decl) != id->src_fn)
5791 /* Things that weren't in the scope of the function we're inlining
5792 from aren't in the scope we're inlining to, either. */
5794 else if (TREE_STATIC (decl))
5795 /* Function-scoped static variables should stay in the original
5796 function. */
5798 else
5800 /* Ordinary automatic local variables are now in the scope of the
5801 new function. */
5802 DECL_CONTEXT (copy) = id->dst_fn;
5803 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5805 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5806 DECL_ATTRIBUTES (copy)
5807 = tree_cons (get_identifier ("omp simt private"), NULL,
5808 DECL_ATTRIBUTES (copy));
5809 id->dst_simt_vars->safe_push (copy);
5813 return copy;
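/* Worked example of the context rules above (illustrative): when inlining
   g into f, an automatic local of g is given DECL_CONTEXT f, whereas a
   'static int counter' declared inside g keeps its original context, so
   every inlined copy of g keeps updating the one shared counter.  */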
5816 /* Create a new VAR_DECL that is identical in all respects to DECL, which
5817 must be either a PARM_DECL or a RESULT_DECL. The original DECL must come
5818 from ID->src_fn and the copy will be part of ID->dst_fn. */
5820 tree
5821 copy_decl_to_var (tree decl, copy_body_data *id)
5823 tree copy, type;
5825 gcc_assert (TREE_CODE (decl) == PARM_DECL
5826 || TREE_CODE (decl) == RESULT_DECL);
5828 type = TREE_TYPE (decl);
5830 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5831 VAR_DECL, DECL_NAME (decl), type);
5832 if (DECL_PT_UID_SET_P (decl))
5833 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5834 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5835 TREE_READONLY (copy) = TREE_READONLY (decl);
5836 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5837 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5839 return copy_decl_for_dup_finish (id, decl, copy);
5842 /* Like copy_decl_to_var, but create a return slot object instead of a
5843 pointer variable for return by invisible reference. */
5845 static tree
5846 copy_result_decl_to_var (tree decl, copy_body_data *id)
5848 tree copy, type;
5850 gcc_assert (TREE_CODE (decl) == PARM_DECL
5851 || TREE_CODE (decl) == RESULT_DECL);
5853 type = TREE_TYPE (decl);
5854 if (DECL_BY_REFERENCE (decl))
5855 type = TREE_TYPE (type);
5857 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5858 VAR_DECL, DECL_NAME (decl), type);
5859 if (DECL_PT_UID_SET_P (decl))
5860 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5861 TREE_READONLY (copy) = TREE_READONLY (decl);
5862 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5863 if (!DECL_BY_REFERENCE (decl))
5865 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5866 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5869 return copy_decl_for_dup_finish (id, decl, copy);
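/* Illustrative example: for a C++ function returning a class by invisible
   reference, e.g.

     struct S { S (); S (const S &); int a[16]; };
     S f ();

   DECL_RESULT (f) has type 'S *' with DECL_BY_REFERENCE set, so the
   variable built above gets the pointed-to type 'S': it stands for the
   return slot itself rather than for the hidden pointer to it.  */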
5872 tree
5873 copy_decl_no_change (tree decl, copy_body_data *id)
5875 tree copy;
5877 copy = copy_node (decl);
5879 /* The COPY is not abstract; it will be generated in DST_FN. */
5880 DECL_ABSTRACT_P (copy) = false;
5881 lang_hooks.dup_lang_specific_decl (copy);
5883 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5884 been taken; it's for internal bookkeeping in expand_goto_internal. */
5885 if (TREE_CODE (copy) == LABEL_DECL)
5887 TREE_ADDRESSABLE (copy) = 0;
5888 LABEL_DECL_UID (copy) = -1;
5891 return copy_decl_for_dup_finish (id, decl, copy);
5894 static tree
5895 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5897 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5898 return copy_decl_to_var (decl, id);
5899 else
5900 return copy_decl_no_change (decl, id);
5903 /* Return a copy of the function's argument tree without any modifications. */
5905 static tree
5906 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
5908 tree arg, *parg;
5909 tree new_parm = NULL;
5911 parg = &new_parm;
5912 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
5914 tree new_tree = remap_decl (arg, id);
5915 if (TREE_CODE (new_tree) != PARM_DECL)
5916 new_tree = id->copy_decl (arg, id);
5917 lang_hooks.dup_lang_specific_decl (new_tree);
5918 *parg = new_tree;
5919 parg = &DECL_CHAIN (new_tree);
5921 return new_parm;
5924 /* Return a copy of the function's static chain. */
5925 static tree
5926 copy_static_chain (tree static_chain, copy_body_data * id)
5928 tree *chain_copy, *pvar;
5930 chain_copy = &static_chain;
5931 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5933 tree new_tree = remap_decl (*pvar, id);
5934 lang_hooks.dup_lang_specific_decl (new_tree);
5935 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5936 *pvar = new_tree;
5938 return static_chain;
5941 /* Return true if the function is allowed to be versioned.
5942 This is a guard for the versioning functionality. */
5944 bool
5945 tree_versionable_function_p (tree fndecl)
5947 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5948 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
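/* Example: versioning is refused both for a function declared

     __attribute__((noclone)) int f (int);

   and for anything copy_forbidden rejects, such as a function that
   receives a non-local goto, so IPA transforms are expected to test this
   predicate before attempting to create a clone.  */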
5951 /* Update clone info after duplication. */
5953 static void
5954 update_clone_info (copy_body_data * id)
5956 vec<ipa_param_performed_split, va_gc> *cur_performed_splits
5957 = id->dst_node->clone.performed_splits;
5958 if (cur_performed_splits)
5960 unsigned len = cur_performed_splits->length ();
5961 for (unsigned i = 0; i < len; i++)
5963 ipa_param_performed_split *ps = &(*cur_performed_splits)[i];
5964 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
5968 struct cgraph_node *node;
5969 if (!id->dst_node->clones)
5970 return;
5971 for (node = id->dst_node->clones; node != id->dst_node;)
5973 /* First update replace maps to match the new body. */
5974 if (node->clone.tree_map)
5976 unsigned int i;
5977 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5979 struct ipa_replace_map *replace_info;
5980 replace_info = (*node->clone.tree_map)[i];
5981 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5984 if (node->clone.performed_splits)
5986 unsigned len = vec_safe_length (node->clone.performed_splits);
5987 for (unsigned i = 0; i < len; i++)
5989 ipa_param_performed_split *ps
5990 = &(*node->clone.performed_splits)[i];
5991 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
5994 if (unsigned len = vec_safe_length (cur_performed_splits))
5996 /* We do not want to add the current performed splits when we are saving
5997 a copy of the function body for later use during inlining; that would
5998 just duplicate all entries. So check whether anything referring to the
5999 first dummy_decl is already present. */
6000 unsigned dst_len = vec_safe_length (node->clone.performed_splits);
6001 ipa_param_performed_split *first = &(*cur_performed_splits)[0];
6002 for (unsigned i = 0; i < dst_len; i++)
6003 if ((*node->clone.performed_splits)[i].dummy_decl
6004 == first->dummy_decl)
6006 len = 0;
6007 break;
6010 for (unsigned i = 0; i < len; i++)
6011 vec_safe_push (node->clone.performed_splits,
6012 (*cur_performed_splits)[i]);
6013 if (flag_checking)
6015 for (unsigned i = 0; i < dst_len; i++)
6017 ipa_param_performed_split *ps1
6018 = &(*node->clone.performed_splits)[i];
6019 for (unsigned j = i + 1; j < dst_len; j++)
6021 ipa_param_performed_split *ps2
6022 = &(*node->clone.performed_splits)[j];
6023 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
6024 || ps1->unit_offset != ps2->unit_offset);
6030 if (node->clones)
6031 node = node->clones;
6032 else if (node->next_sibling_clone)
6033 node = node->next_sibling_clone;
6034 else
6036 while (node != id->dst_node && !node->next_sibling_clone)
6037 node = node->clone_of;
6038 if (node != id->dst_node)
6039 node = node->next_sibling_clone;
6044 /* Create a copy of a function's tree.
6045 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6046 of the original function and the new copied function
6047 respectively. In case we want to replace a DECL
6048 tree with another tree while duplicating the function's
6049 body, TREE_MAP represents the mapping between these
6050 trees. If UPDATE_CLONES is set, the call_stmt fields
6051 of edges of clones of the function will be updated.
6053 If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype
6054 (i.e. the function parameters and return value) should be modified.
6055 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
6056 If non-NULL, NEW_ENTRY determines the new entry BB of the clone. */
6058 void
6059 tree_function_versioning (tree old_decl, tree new_decl,
6060 vec<ipa_replace_map *, va_gc> *tree_map,
6061 ipa_param_adjustments *param_adjustments,
6062 bool update_clones, bitmap blocks_to_copy,
6063 basic_block new_entry)
6065 struct cgraph_node *old_version_node;
6066 struct cgraph_node *new_version_node;
6067 copy_body_data id;
6068 tree p;
6069 unsigned i;
6070 struct ipa_replace_map *replace_info;
6071 basic_block old_entry_block, bb;
6072 auto_vec<gimple *, 10> init_stmts;
6073 tree vars = NULL_TREE;
6075 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6076 && TREE_CODE (new_decl) == FUNCTION_DECL);
6077 DECL_POSSIBLY_INLINED (old_decl) = 1;
6079 old_version_node = cgraph_node::get (old_decl);
6080 gcc_checking_assert (old_version_node);
6081 new_version_node = cgraph_node::get (new_decl);
6082 gcc_checking_assert (new_version_node);
6084 /* Copy over debug args. */
6085 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6087 vec<tree, va_gc> **new_debug_args, **old_debug_args;
6088 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6089 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6090 old_debug_args = decl_debug_args_lookup (old_decl);
6091 if (old_debug_args)
6093 new_debug_args = decl_debug_args_insert (new_decl);
6094 *new_debug_args = vec_safe_copy (*old_debug_args);
6098 /* Output the inlining info for this abstract function, since it has been
6099 inlined. If we don't do this now, we can lose the information about the
6100 variables in the function when the blocks get blown away as soon as we
6101 remove the cgraph node. */
6102 (*debug_hooks->outlining_inline_function) (old_decl);
6104 DECL_ARTIFICIAL (new_decl) = 1;
6105 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6106 if (DECL_ORIGIN (old_decl) == old_decl)
6107 old_version_node->used_as_abstract_origin = true;
6108 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6110 /* Prepare the data structures for the tree copy. */
6111 memset (&id, 0, sizeof (id));
6113 /* Remember statements that will need folding after the body is copied. */
6114 id.statements_to_fold = new hash_set<gimple *>;
6116 id.decl_map = new hash_map<tree, tree>;
6117 id.debug_map = NULL;
6118 id.src_fn = old_decl;
6119 id.dst_fn = new_decl;
6120 id.src_node = old_version_node;
6121 id.dst_node = new_version_node;
6122 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6123 id.blocks_to_copy = blocks_to_copy;
6125 id.copy_decl = copy_decl_no_change;
6126 id.transform_call_graph_edges
6127 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6128 id.transform_new_cfg = true;
6129 id.transform_return_to_modify = false;
6130 id.transform_parameter = false;
6131 id.transform_lang_insert_block = NULL;
6133 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
6134 (DECL_STRUCT_FUNCTION (old_decl));
6135 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6136 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6137 initialize_cfun (new_decl, old_decl,
6138 new_entry ? new_entry->count : old_entry_block->count);
6139 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6140 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6141 = id.src_cfun->gimple_df->ipa_pta;
6143 /* Copy the function's static chain. */
6144 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6145 if (p)
6146 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6147 = copy_static_chain (p, &id);
6149 auto_vec<int, 16> new_param_indices;
6150 ipa_param_adjustments *old_param_adjustments
6151 = old_version_node->clone.param_adjustments;
6152 if (old_param_adjustments)
6153 old_param_adjustments->get_updated_indices (&new_param_indices);
6155 /* If there's a tree_map, prepare for substitution. */
6156 if (tree_map)
6157 for (i = 0; i < tree_map->length (); i++)
6159 gimple *init;
6160 replace_info = (*tree_map)[i];
6162 int p = replace_info->parm_num;
6163 if (old_param_adjustments)
6164 p = new_param_indices[p];
6166 tree parm;
6167 tree req_type, new_type;
6169 for (parm = DECL_ARGUMENTS (old_decl); p;
6170 parm = DECL_CHAIN (parm))
6171 p--;
6172 tree old_tree = parm;
6173 req_type = TREE_TYPE (parm);
6174 new_type = TREE_TYPE (replace_info->new_tree);
6175 if (!useless_type_conversion_p (req_type, new_type))
6177 if (fold_convertible_p (req_type, replace_info->new_tree))
6178 replace_info->new_tree
6179 = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
6180 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
6181 replace_info->new_tree
6182 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
6183 replace_info->new_tree);
6184 else
6186 if (dump_file)
6188 fprintf (dump_file, " const ");
6189 print_generic_expr (dump_file,
6190 replace_info->new_tree);
6191 fprintf (dump_file,
6192 " can't be converted to param ");
6193 print_generic_expr (dump_file, parm);
6194 fprintf (dump_file, "\n");
6196 old_tree = NULL;
6200 if (old_tree)
6202 init = setup_one_parameter (&id, old_tree, replace_info->new_tree,
6203 id.src_fn, NULL, &vars);
6204 if (init)
6205 init_stmts.safe_push (init);
6209 ipa_param_body_adjustments *param_body_adjs = NULL;
6210 if (param_adjustments)
6212 param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6213 new_decl, old_decl,
6214 &id, &vars, tree_map);
6215 id.param_body_adjs = param_body_adjs;
6216 DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6218 else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6219 DECL_ARGUMENTS (new_decl)
6220 = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6222 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6223 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6225 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6227 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6228 /* Add local vars. */
6229 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6231 if (DECL_RESULT (old_decl) == NULL_TREE)
6233 else if (param_adjustments && param_adjustments->m_skip_return
6234 && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6236 tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6237 &id);
6238 declare_inline_vars (NULL, resdecl_repl);
6239 insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6241 DECL_RESULT (new_decl)
6242 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6243 RESULT_DECL, NULL_TREE, void_type_node);
6244 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6245 DECL_IS_MALLOC (new_decl) = false;
6246 cfun->returns_struct = 0;
6247 cfun->returns_pcc_struct = 0;
6249 else
6251 tree old_name;
6252 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6253 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6254 if (gimple_in_ssa_p (id.src_cfun)
6255 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6256 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6258 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6259 insert_decl_map (&id, old_name, new_name);
6260 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6261 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6265 /* Set up the destination function's loop tree. */
6266 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6268 cfun->curr_properties &= ~PROP_loops;
6269 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6270 cfun->curr_properties |= PROP_loops;
6273 /* Copy the function's body. */
6274 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6275 new_entry);
6277 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6278 number_blocks (new_decl);
6280 /* We want to create the BB unconditionally, so that the addition of
6281 debug stmts doesn't affect BB count, which may in the end cause
6282 codegen differences. */
6283 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6284 while (init_stmts.length ())
6285 insert_init_stmt (&id, bb, init_stmts.pop ());
6286 update_clone_info (&id);
6288 /* Remap the nonlocal_goto_save_area, if any. */
6289 if (cfun->nonlocal_goto_save_area)
6291 struct walk_stmt_info wi;
6293 memset (&wi, 0, sizeof (wi));
6294 wi.info = &id;
6295 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6298 /* Clean up. */
6299 delete id.decl_map;
6300 if (id.debug_map)
6301 delete id.debug_map;
6302 free_dominance_info (CDI_DOMINATORS);
6303 free_dominance_info (CDI_POST_DOMINATORS);
6305 update_max_bb_count ();
6306 fold_marked_statements (0, id.statements_to_fold);
6307 delete id.statements_to_fold;
6308 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6309 if (id.dst_node->definition)
6310 cgraph_edge::rebuild_references ();
6311 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6313 calculate_dominance_info (CDI_DOMINATORS);
6314 fix_loop_structure (NULL);
6316 update_ssa (TODO_update_ssa);
6318 /* After partial cloning we need to rescale frequencies, so they are
6319 within proper range in the cloned function. */
6320 if (new_entry)
6322 struct cgraph_edge *e;
6323 rebuild_frequencies ();
6325 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6326 for (e = new_version_node->callees; e; e = e->next_callee)
6328 basic_block bb = gimple_bb (e->call_stmt);
6329 e->count = bb->count;
6331 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6333 basic_block bb = gimple_bb (e->call_stmt);
6334 e->count = bb->count;
6338 if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6340 vec<tree, va_gc> **debug_args = NULL;
6341 unsigned int len = 0;
6342 unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6344 for (i = 0; i < reset_len; i++)
6346 tree parm = param_body_adjs->m_reset_debug_decls[i];
6347 gcc_assert (is_gimple_reg (parm));
6348 tree ddecl;
6350 if (debug_args == NULL)
6352 debug_args = decl_debug_args_insert (new_decl);
6353 len = vec_safe_length (*debug_args);
6355 ddecl = make_node (DEBUG_EXPR_DECL);
6356 DECL_ARTIFICIAL (ddecl) = 1;
6357 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6358 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6359 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6360 vec_safe_push (*debug_args, ddecl);
6362 if (debug_args != NULL)
6364 /* On the callee side, add
6365 DEBUG D#Y s=> parm
6366 DEBUG var => D#Y
6367 stmts to the first bb where var is a VAR_DECL created for the
6368 optimized-away parameter in the DECL_INITIAL block. This hints
6369 in the debug info that var (whose DECL_ORIGIN is the parm
6370 PARM_DECL) is optimized away, but its value could be looked up
6371 at the call site as the value of D#X there. */
6372 tree vexpr;
6373 gimple_stmt_iterator cgsi
6374 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6375 gimple *def_temp;
6376 tree var = vars;
6377 i = vec_safe_length (*debug_args);
6380 i -= 2;
6381 while (var != NULL_TREE
6382 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6383 var = TREE_CHAIN (var);
6384 if (var == NULL_TREE)
6385 break;
6386 vexpr = make_node (DEBUG_EXPR_DECL);
6387 tree parm = (**debug_args)[i];
6388 DECL_ARTIFICIAL (vexpr) = 1;
6389 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6390 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6391 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6392 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6393 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6394 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6396 while (i > len);
6399 delete param_body_adjs;
6400 free_dominance_info (CDI_DOMINATORS);
6401 free_dominance_info (CDI_POST_DOMINATORS);
6403 gcc_assert (!id.debug_stmts.exists ());
6404 pop_cfun ();
6405 return;
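/* Illustrative call, simplified from what clone materialization does (all
   of the optional arguments may be NULL): copy OLD_DECL's body into
   NEW_DECL with no parameter changes and every block copied:

     tree_function_versioning (old_decl, new_decl, NULL, NULL,
			       false, NULL, NULL);

   The NULLs are TREE_MAP, PARAM_ADJUSTMENTS, BLOCKS_TO_COPY and NEW_ENTRY;
   FALSE is UPDATE_CLONES.  */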
6408 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6409 the callee and return the inlined body on success. */
6411 tree
6412 maybe_inline_call_in_expr (tree exp)
6414 tree fn = get_callee_fndecl (exp);
6416 /* We can only try to inline "const" functions. */
6417 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6419 call_expr_arg_iterator iter;
6420 copy_body_data id;
6421 tree param, arg, t;
6422 hash_map<tree, tree> decl_map;
6424 /* Remap the parameters. */
6425 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6426 param;
6427 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6428 decl_map.put (param, arg);
6430 memset (&id, 0, sizeof (id));
6431 id.src_fn = fn;
6432 id.dst_fn = current_function_decl;
6433 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6434 id.decl_map = &decl_map;
6436 id.copy_decl = copy_decl_no_change;
6437 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6438 id.transform_new_cfg = false;
6439 id.transform_return_to_modify = true;
6440 id.transform_parameter = true;
6441 id.transform_lang_insert_block = NULL;
6443 /* Make sure not to unshare trees behind the front-end's back
6444 since front-end specific mechanisms may rely on sharing. */
6445 id.regimplify = false;
6446 id.do_not_unshare = true;
6448 /* We're not inside any EH region. */
6449 id.eh_lp_nr = 0;
6451 t = copy_tree_body (&id);
6453 /* We can only return something suitable for use in a GENERIC
6454 expression tree. */
6455 if (TREE_CODE (t) == MODIFY_EXPR)
6456 return TREE_OPERAND (t, 1);
6459 return NULL_TREE;
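/* Illustrative use with a hypothetical caller: given a GENERIC CALL_EXPR
   EXP whose callee is a "const" function with its body still available,
   try to fold the call away:

     tree folded = maybe_inline_call_in_expr (exp);
     if (folded)
       exp = folded;

   A NULL_TREE result means the call has to stay as it is.  */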
6462 /* Duplicate a type, fields and all. */
6464 tree
6465 build_duplicate_type (tree type)
6467 struct copy_body_data id;
6469 memset (&id, 0, sizeof (id));
6470 id.src_fn = current_function_decl;
6471 id.dst_fn = current_function_decl;
6472 id.src_cfun = cfun;
6473 id.decl_map = new hash_map<tree, tree>;
6474 id.debug_map = NULL;
6475 id.copy_decl = copy_decl_no_change;
6477 type = remap_type_1 (type, &id);
6479 delete id.decl_map;
6480 if (id.debug_map)
6481 delete id.debug_map;
6483 TYPE_CANONICAL (type) = type;
6485 return type;
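/* Illustrative use (hypothetical): make a structurally identical but
   distinct type, e.g. as a base for further modification:

     tree dup = build_duplicate_type (orig_type);

   Since TYPE_CANONICAL of the duplicate is the duplicate itself, the
   middle end treats it as a type of its own rather than as another
   spelling of ORIG_TYPE.  */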
6488 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6489 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6490 evaluation. */
6492 tree
6493 copy_fn (tree fn, tree& parms, tree& result)
6495 copy_body_data id;
6496 tree param;
6497 hash_map<tree, tree> decl_map;
6499 tree *p = &parms;
6500 *p = NULL_TREE;
6502 memset (&id, 0, sizeof (id));
6503 id.src_fn = fn;
6504 id.dst_fn = current_function_decl;
6505 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6506 id.decl_map = &decl_map;
6508 id.copy_decl = copy_decl_no_change;
6509 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6510 id.transform_new_cfg = false;
6511 id.transform_return_to_modify = false;
6512 id.transform_parameter = true;
6513 id.transform_lang_insert_block = NULL;
6515 /* Make sure not to unshare trees behind the front-end's back
6516 since front-end specific mechanisms may rely on sharing. */
6517 id.regimplify = false;
6518 id.do_not_unshare = true;
6519 id.do_not_fold = true;
6521 /* We're not inside any EH region. */
6522 id.eh_lp_nr = 0;
6524 /* Remap the parameters and result and return them to the caller. */
6525 for (param = DECL_ARGUMENTS (fn);
6526 param;
6527 param = DECL_CHAIN (param))
6529 *p = remap_decl (param, &id);
6530 p = &DECL_CHAIN (*p);
6533 if (DECL_RESULT (fn))
6534 result = remap_decl (DECL_RESULT (fn), &id);
6535 else
6536 result = NULL_TREE;
6538 return copy_tree_body (&id);
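/* Illustrative sketch, modeled on the constexpr evaluation use noted
   above: take a private copy of the body plus remapped parameters and
   result before substituting argument values into it:

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);

   then walk PARMS, bind each parameter copy to its argument value and
   evaluate BODY against those bindings.  */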