gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63 #include "tree-cfgcleanup.h"
64 #include "tree-ssa-live.h"
66 /* I'm not real happy about this, but we need to handle gimple and
67 non-gimple trees. */
69 /* Inlining, Cloning, Versioning, Parallelization
71 Inlining: a function body is duplicated, but the PARM_DECLs are
72 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
73 MODIFY_EXPRs that store to a dedicated returned-value variable.
74 The duplicated eh_region info of the copy will later be appended
75 to the info for the caller; the eh_region info in copied throwing
76 statements and RESX statements is adjusted accordingly.
78 Cloning: (only in C++) We have one body for a con/de/structor, and
79 multiple function decls, each with a unique parameter list.
80 Duplicate the body, using the given splay tree; some parameters
81 will become constants (like 0 or 1).
83 Versioning: a function body is duplicated and the result is a new
84 function, rather than being copied into blocks of an existing function
85 as with inlining. Some parameters will become constants.
87 Parallelization: a region of a function is duplicated resulting in
88 a new function. Variables may be replaced with complex expressions
89 to enable shared variable semantics.
91 All of these will simultaneously look up any callgraph edges. If
92 we're going to inline the duplicated function body, and the given
93 function has some cloned callgraph nodes (one for each place this
94 function will be inlined) those callgraph edges will be duplicated.
95 If we're cloning the body, those callgraph edges will be
96 updated to point into the new body. (Note that the original
97 callgraph node and edge list will not be altered.)
99 See the CALL_EXPR handling case in copy_tree_body_r (). */
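/* Editorial sketch (not part of the upstream sources): for a callee such as

     int callee (int p) { return p + 1; }

   inlining remaps the PARM_DECL `p' to a local VAR_DECL initialized from the
   actual argument, and the non-void RETURN_EXPR becomes a MODIFY_EXPR into
   the dedicated returned-value variable, roughly:

     p.0 = arg;
     retval.1 = p.0 + 1;

   The names `p.0' and `retval.1' are purely illustrative; the old-decl ->
   new-decl pairs are carried in the decl_map of the copy_body_data used
   while the body is copied.  */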
101 /* To Do:
103 o In order to make inlining-on-trees work, we pessimized
104 function-local static constants. In particular, they are now
105 always output, even when not addressed. Fix this by treating
106 function-local static constants just like global static
107 constants; the back-end already knows not to output them if they
108 are not needed.
110 o Provide heuristics to clamp inlining of recursive template
111 calls? */
114 /* Weights that estimate_num_insns uses to estimate the size of the
115 produced code. */
117 eni_weights eni_size_weights;
119 /* Weights that estimate_num_insns uses to estimate the time necessary
120 to execute the produced code. */
122 eni_weights eni_time_weights;
124 /* Prototypes. */
126 static tree declare_return_variable (copy_body_data *, tree, tree,
127 basic_block);
128 static void remap_block (tree *, copy_body_data *);
129 static void copy_bind_expr (tree *, int *, copy_body_data *);
130 static void declare_inline_vars (tree, tree);
131 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
132 static void prepend_lexical_block (tree current_block, tree new_block);
133 static tree copy_result_decl_to_var (tree, copy_body_data *);
134 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
135 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
136 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
138 /* Insert a tree->tree mapping for ID. Although the name suggests
139 that the trees should be variables, it is used for more than that. */
141 void
142 insert_decl_map (copy_body_data *id, tree key, tree value)
144 id->decl_map->put (key, value);
146 /* Always insert an identity map as well. If we see this same new
147 node again, we won't want to duplicate it a second time. */
148 if (key != value)
149 id->decl_map->put (value, value);
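/* Usage sketch (editorial; OLD_PARM and NEW_VAR are hypothetical decls):
   record a replacement once, and every later lookup yields the same copy.

     insert_decl_map (id, old_parm, new_var);
     ...
     tree t = remap_decl (old_parm, id);    (yields new_var)

   Because the identity mapping VALUE -> VALUE is inserted as well, running
   into NEW_VAR itself later on will not produce a second copy.  */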
152 /* Insert a tree->tree mapping for ID. This is only used for
153 variables. */
155 static void
156 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
158 if (!gimple_in_ssa_p (id->src_cfun))
159 return;
161 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
162 return;
164 if (!target_for_debug_bind (key))
165 return;
167 gcc_assert (TREE_CODE (key) == PARM_DECL);
168 gcc_assert (VAR_P (value));
170 if (!id->debug_map)
171 id->debug_map = new hash_map<tree, tree>;
173 id->debug_map->put (key, value);
176 /* If nonzero, we're remapping the contents of inlined debug
177 statements. If negative, an error has occurred, such as a
178 reference to a variable that isn't available in the inlined
179 context. */
180 static int processing_debug_stmt = 0;
182 /* Construct new SSA name for old NAME. ID is the inline context. */
184 static tree
185 remap_ssa_name (tree name, copy_body_data *id)
187 tree new_tree, var;
188 tree *n;
190 gcc_assert (TREE_CODE (name) == SSA_NAME);
192 n = id->decl_map->get (name);
193 if (n)
195 /* When we perform edge redirection as part of CFG copy, IPA-SRA can
196 remove an unused LHS from a call statement. Such LHS can however
197 still appear in debug statements, but their value is lost in this
198 function and we do not want to map them. */
199 if (id->killed_new_ssa_names
200 && id->killed_new_ssa_names->contains (*n))
202 gcc_assert (processing_debug_stmt);
203 processing_debug_stmt = -1;
204 return name;
207 return unshare_expr (*n);
210 if (processing_debug_stmt)
212 if (SSA_NAME_IS_DEFAULT_DEF (name)
213 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
214 && id->entry_bb == NULL
215 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
217 tree vexpr = make_node (DEBUG_EXPR_DECL);
218 gimple *def_temp;
219 gimple_stmt_iterator gsi;
220 tree val = SSA_NAME_VAR (name);
222 n = id->decl_map->get (val);
223 if (n != NULL)
224 val = *n;
225 if (TREE_CODE (val) != PARM_DECL
226 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
228 processing_debug_stmt = -1;
229 return name;
231 n = id->decl_map->get (val);
232 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
233 return *n;
234 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
235 DECL_ARTIFICIAL (vexpr) = 1;
236 TREE_TYPE (vexpr) = TREE_TYPE (name);
237 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
238 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
239 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
240 insert_decl_map (id, val, vexpr);
241 return vexpr;
244 processing_debug_stmt = -1;
245 return name;
248 /* Remap anonymous SSA names or SSA names of anonymous decls. */
249 var = SSA_NAME_VAR (name);
250 if (!var
251 || (!SSA_NAME_IS_DEFAULT_DEF (name)
252 && VAR_P (var)
253 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
254 && DECL_ARTIFICIAL (var)
255 && DECL_IGNORED_P (var)
256 && !DECL_NAME (var)))
258 struct ptr_info_def *pi;
259 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
260 if (!var && SSA_NAME_IDENTIFIER (name))
261 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
262 insert_decl_map (id, name, new_tree);
263 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
264 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
265 /* At least IPA points-to info can be directly transferred. */
266 if (id->src_cfun->gimple_df
267 && id->src_cfun->gimple_df->ipa_pta
268 && POINTER_TYPE_P (TREE_TYPE (name))
269 && (pi = SSA_NAME_PTR_INFO (name))
270 && !pi->pt.anything)
272 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
273 new_pi->pt = pi->pt;
275 /* So can range-info. */
276 if (!POINTER_TYPE_P (TREE_TYPE (name))
277 && SSA_NAME_RANGE_INFO (name))
278 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
279 SSA_NAME_RANGE_INFO (name));
280 return new_tree;
283 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
284 in copy_bb. */
285 new_tree = remap_decl (var, id);
287 /* We might've substituted a constant or another SSA_NAME for
288 the variable.
290 Replace the SSA name representing RESULT_DECL by a variable during
291 inlining: this saves us from the need to introduce a PHI node in case
292 the return value is only partly initialized. */
293 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
294 && (!SSA_NAME_VAR (name)
295 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
296 || !id->transform_return_to_modify))
298 struct ptr_info_def *pi;
299 new_tree = make_ssa_name (new_tree);
300 insert_decl_map (id, name, new_tree);
301 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
302 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
303 /* At least IPA points-to info can be directly transferred. */
304 if (id->src_cfun->gimple_df
305 && id->src_cfun->gimple_df->ipa_pta
306 && POINTER_TYPE_P (TREE_TYPE (name))
307 && (pi = SSA_NAME_PTR_INFO (name))
308 && !pi->pt.anything)
310 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
311 new_pi->pt = pi->pt;
313 /* So can range-info. */
314 if (!POINTER_TYPE_P (TREE_TYPE (name))
315 && SSA_NAME_RANGE_INFO (name))
316 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
317 SSA_NAME_RANGE_INFO (name));
318 if (SSA_NAME_IS_DEFAULT_DEF (name))
320 /* By inlining a function having an uninitialized variable, we might
321 extend its lifetime (the variable might get reused). This causes an
322 ICE in case we end up extending the lifetime of an SSA name across an
323 abnormal edge, and it also increases register pressure.
325 We simply initialize all uninitialized vars by 0, except
326 when we are inlining into the very first BB. We can avoid
327 this for all BBs that are not inside strongly connected
328 regions of the CFG, but this is expensive to test. */
329 if (id->entry_bb
330 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
331 && (!SSA_NAME_VAR (name)
332 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
333 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
334 0)->dest
335 || EDGE_COUNT (id->entry_bb->preds) != 1))
337 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
338 gimple *init_stmt;
339 tree zero = build_zero_cst (TREE_TYPE (new_tree));
341 init_stmt = gimple_build_assign (new_tree, zero);
342 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
343 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
345 else
347 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
348 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
352 else
353 insert_decl_map (id, name, new_tree);
354 return new_tree;
357 /* Remap DECL during the copying of the BLOCK tree for the function. */
359 tree
360 remap_decl (tree decl, copy_body_data *id)
362 tree *n;
364 /* We only remap local variables in the current function. */
366 /* See if we have remapped this declaration. */
368 n = id->decl_map->get (decl);
370 if (!n && processing_debug_stmt)
372 processing_debug_stmt = -1;
373 return decl;
376 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
377 necessary DECLs have already been remapped and we do not want to duplicate
378 a decl coming from outside of the sequence we are copying. */
379 if (!n
380 && id->prevent_decl_creation_for_types
381 && id->remapping_type_depth > 0
382 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
383 return decl;
385 /* If we didn't already have an equivalent for this declaration, create one
386 now. */
387 if (!n)
389 /* Make a copy of the variable or label. */
390 tree t = id->copy_decl (decl, id);
392 /* Remember it, so that if we encounter this local entity again
393 we can reuse this copy. Do this early because remap_type may
394 need this decl for TYPE_STUB_DECL. */
395 insert_decl_map (id, decl, t);
397 if (!DECL_P (t))
398 return t;
400 /* Remap types, if necessary. */
401 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
402 if (TREE_CODE (t) == TYPE_DECL)
404 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
406 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
407 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
408 is not set on the TYPE_DECL, for example in LTO mode. */
409 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
411 tree x = build_variant_type_copy (TREE_TYPE (t));
412 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
413 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
414 DECL_ORIGINAL_TYPE (t) = x;
418 /* Remap sizes as necessary. */
419 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
420 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
422 /* If fields, do likewise for offset and qualifier. */
423 if (TREE_CODE (t) == FIELD_DECL)
425 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
426 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
427 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
430 return t;
433 if (id->do_not_unshare)
434 return *n;
435 else
436 return unshare_expr (*n);
439 static tree
440 remap_type_1 (tree type, copy_body_data *id)
442 tree new_tree, t;
444 /* We do need a copy. Build and register it now. If this is a pointer or
445 reference type, remap the designated type and make a new pointer or
446 reference type. */
447 if (TREE_CODE (type) == POINTER_TYPE)
449 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
450 TYPE_MODE (type),
451 TYPE_REF_CAN_ALIAS_ALL (type));
452 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
453 new_tree = build_type_attribute_qual_variant (new_tree,
454 TYPE_ATTRIBUTES (type),
455 TYPE_QUALS (type));
456 insert_decl_map (id, type, new_tree);
457 return new_tree;
459 else if (TREE_CODE (type) == REFERENCE_TYPE)
461 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
462 TYPE_MODE (type),
463 TYPE_REF_CAN_ALIAS_ALL (type));
464 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
465 new_tree = build_type_attribute_qual_variant (new_tree,
466 TYPE_ATTRIBUTES (type),
467 TYPE_QUALS (type));
468 insert_decl_map (id, type, new_tree);
469 return new_tree;
471 else
472 new_tree = copy_node (type);
474 insert_decl_map (id, type, new_tree);
476 /* This is a new type, not a copy of an old type. Need to reassociate
477 variants. We can handle everything except the main variant lazily. */
478 t = TYPE_MAIN_VARIANT (type);
479 if (type != t)
481 t = remap_type (t, id);
482 TYPE_MAIN_VARIANT (new_tree) = t;
483 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
484 TYPE_NEXT_VARIANT (t) = new_tree;
486 else
488 TYPE_MAIN_VARIANT (new_tree) = new_tree;
489 TYPE_NEXT_VARIANT (new_tree) = NULL;
492 if (TYPE_STUB_DECL (type))
493 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
495 /* Lazily create pointer and reference types. */
496 TYPE_POINTER_TO (new_tree) = NULL;
497 TYPE_REFERENCE_TO (new_tree) = NULL;
499 /* Copy all types that may contain references to local variables; be sure to
500 preserve sharing in between type and its main variant when possible. */
501 switch (TREE_CODE (new_tree))
503 case INTEGER_TYPE:
504 case REAL_TYPE:
505 case FIXED_POINT_TYPE:
506 case ENUMERAL_TYPE:
507 case BOOLEAN_TYPE:
508 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
510 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
511 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
513 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
514 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
516 else
518 t = TYPE_MIN_VALUE (new_tree);
519 if (t && TREE_CODE (t) != INTEGER_CST)
520 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
522 t = TYPE_MAX_VALUE (new_tree);
523 if (t && TREE_CODE (t) != INTEGER_CST)
524 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
526 return new_tree;
528 case FUNCTION_TYPE:
529 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
530 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
531 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
532 else
533 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
534 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
535 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
536 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
537 else
538 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
539 return new_tree;
541 case ARRAY_TYPE:
542 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
543 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
544 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
545 else
546 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
548 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
550 gcc_checking_assert (TYPE_DOMAIN (type)
551 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
552 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
554 else
556 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
557 /* For array bounds where we have decided not to copy over the bounds
558 variable which isn't used in OpenMP/OpenACC region, change them to
559 an uninitialized VAR_DECL temporary. */
560 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
561 && id->adjust_array_error_bounds
562 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
564 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
565 DECL_ATTRIBUTES (v)
566 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
567 DECL_ATTRIBUTES (v));
568 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
571 break;
573 case RECORD_TYPE:
574 case UNION_TYPE:
575 case QUAL_UNION_TYPE:
576 if (TYPE_MAIN_VARIANT (type) != type
577 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
578 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
579 else
581 tree f, nf = NULL;
583 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
585 t = remap_decl (f, id);
586 DECL_CONTEXT (t) = new_tree;
587 DECL_CHAIN (t) = nf;
588 nf = t;
590 TYPE_FIELDS (new_tree) = nreverse (nf);
592 break;
594 case OFFSET_TYPE:
595 default:
596 /* Shouldn't have been thought variable sized. */
597 gcc_unreachable ();
600 /* All variants of the type share the same size, so use the already remapped data. */
601 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
603 tree s = TYPE_SIZE (type);
604 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
605 tree su = TYPE_SIZE_UNIT (type);
606 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
607 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
608 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
609 || s == mvs);
610 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
611 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
612 || su == mvsu);
613 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
614 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
616 else
618 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
619 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
622 return new_tree;
625 /* Helper function for remap_type_2, called through walk_tree. */
627 static tree
628 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
630 copy_body_data *id = (copy_body_data *) data;
632 if (TYPE_P (*tp))
633 *walk_subtrees = 0;
635 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
636 return *tp;
638 return NULL_TREE;
641 /* Return true if TYPE needs to be remapped because remap_decl on any
642 needed embedded decl returns something other than that decl. */
644 static bool
645 remap_type_2 (tree type, copy_body_data *id)
647 tree t;
649 #define RETURN_TRUE_IF_VAR(T) \
650 do \
652 tree _t = (T); \
653 if (_t) \
655 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
656 return true; \
657 if (!TYPE_SIZES_GIMPLIFIED (type) \
658 && walk_tree (&_t, remap_type_3, id, NULL)) \
659 return true; \
662 while (0)
664 switch (TREE_CODE (type))
666 case POINTER_TYPE:
667 case REFERENCE_TYPE:
668 case FUNCTION_TYPE:
669 case METHOD_TYPE:
670 return remap_type_2 (TREE_TYPE (type), id);
672 case INTEGER_TYPE:
673 case REAL_TYPE:
674 case FIXED_POINT_TYPE:
675 case ENUMERAL_TYPE:
676 case BOOLEAN_TYPE:
677 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
678 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
679 return false;
681 case ARRAY_TYPE:
682 if (remap_type_2 (TREE_TYPE (type), id)
683 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
684 return true;
685 break;
687 case RECORD_TYPE:
688 case UNION_TYPE:
689 case QUAL_UNION_TYPE:
690 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
691 if (TREE_CODE (t) == FIELD_DECL)
693 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
694 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
695 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
696 if (TREE_CODE (type) == QUAL_UNION_TYPE)
697 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
699 break;
701 default:
702 return false;
705 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
706 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
707 return false;
708 #undef RETURN_TRUE_IF_VAR
711 tree
712 remap_type (tree type, copy_body_data *id)
714 tree *node;
715 tree tmp;
717 if (type == NULL)
718 return type;
720 /* See if we have remapped this type. */
721 node = id->decl_map->get (type);
722 if (node)
723 return *node;
725 /* The type only needs remapping if it's variably modified. */
726 if (! variably_modified_type_p (type, id->src_fn)
727 /* Don't remap if copy_decl method doesn't always return a new
728 decl and for all embedded decls returns the passed in decl. */
729 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
731 insert_decl_map (id, type, type);
732 return type;
735 id->remapping_type_depth++;
736 tmp = remap_type_1 (type, id);
737 id->remapping_type_depth--;
739 return tmp;
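/* Editorial example: remap_type only does real work for variably modified
   types.  In a body such as

     void f (int n) { int a[n]; ... }

   the domain of `int[n]' refers to a decl derived from N, so copying the
   body has to remap the array type too so that its bound refers to the
   copied decl.  A type like plain `int' is not variably modified and is
   simply mapped to itself by the early-out above.  */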
742 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
744 static bool
745 can_be_nonlocal (tree decl, copy_body_data *id)
747 /* We cannot duplicate function decls. */
748 if (TREE_CODE (decl) == FUNCTION_DECL)
749 return true;
751 /* Local static vars must be non-local or we get multiple declaration
752 problems. */
753 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
754 return true;
756 return false;
759 static tree
760 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
761 copy_body_data *id)
763 tree old_var;
764 tree new_decls = NULL_TREE;
766 /* Remap its variables. */
767 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
769 tree new_var;
771 if (can_be_nonlocal (old_var, id))
773 /* We need to add this variable to the local decls as otherwise
774 nothing else will do so. */
775 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
776 add_local_decl (cfun, old_var);
777 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
778 && !DECL_IGNORED_P (old_var)
779 && nonlocalized_list)
780 vec_safe_push (*nonlocalized_list, old_var);
781 continue;
784 /* Remap the variable. */
785 new_var = remap_decl (old_var, id);
787 /* If we didn't remap this variable, we can't mess with its
788 TREE_CHAIN. If we remapped this variable to the return slot, it's
789 already declared somewhere else, so don't declare it here. */
791 if (new_var == id->retvar)
793 else if (!new_var)
795 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
796 && !DECL_IGNORED_P (old_var)
797 && nonlocalized_list)
798 vec_safe_push (*nonlocalized_list, old_var);
800 else
802 gcc_assert (DECL_P (new_var));
803 DECL_CHAIN (new_var) = new_decls;
804 new_decls = new_var;
806 /* Also copy value-expressions. */
807 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
809 tree tem = DECL_VALUE_EXPR (new_var);
810 bool old_regimplify = id->regimplify;
811 id->remapping_type_depth++;
812 walk_tree (&tem, copy_tree_body_r, id, NULL);
813 id->remapping_type_depth--;
814 id->regimplify = old_regimplify;
815 SET_DECL_VALUE_EXPR (new_var, tem);
820 return nreverse (new_decls);
823 /* Copy the BLOCK to contain remapped versions of the variables
824 therein. And hook the new block into the block-tree. */
826 static void
827 remap_block (tree *block, copy_body_data *id)
829 tree old_block;
830 tree new_block;
832 /* Make the new block. */
833 old_block = *block;
834 new_block = make_node (BLOCK);
835 TREE_USED (new_block) = TREE_USED (old_block);
836 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
837 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
838 BLOCK_NONLOCALIZED_VARS (new_block)
839 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
840 *block = new_block;
842 /* Remap its variables. */
843 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
844 &BLOCK_NONLOCALIZED_VARS (new_block),
845 id);
847 if (id->transform_lang_insert_block)
848 id->transform_lang_insert_block (new_block);
850 /* Remember the remapped block. */
851 insert_decl_map (id, old_block, new_block);
854 /* Copy the whole block tree and root it in id->block. */
856 static tree
857 remap_blocks (tree block, copy_body_data *id)
859 tree t;
860 tree new_tree = block;
862 if (!block)
863 return NULL;
865 remap_block (&new_tree, id);
866 gcc_assert (new_tree != block);
867 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
868 prepend_lexical_block (new_tree, remap_blocks (t, id));
869 /* Blocks are in arbitrary order, but make things slightly prettier and do
870 not swap order when producing a copy. */
871 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
872 return new_tree;
875 /* Remap the block tree rooted at BLOCK to nothing. */
877 static void
878 remap_blocks_to_null (tree block, copy_body_data *id)
880 tree t;
881 insert_decl_map (id, block, NULL_TREE);
882 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
883 remap_blocks_to_null (t, id);
886 /* Remap the location info pointed to by LOCUS. */
888 static location_t
889 remap_location (location_t locus, copy_body_data *id)
891 if (LOCATION_BLOCK (locus))
893 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
894 gcc_assert (n);
895 if (*n)
896 return set_block (locus, *n);
899 locus = LOCATION_LOCUS (locus);
901 if (locus != UNKNOWN_LOCATION && id->block)
902 return set_block (locus, id->block);
904 return locus;
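/* Editorial note: a location_t can have a BLOCK baked into it.  When a
   statement is copied, that block has already been remapped (possibly to
   NULL), so the location is rebuilt with set_block; a location without a
   block is instead attached to id->block, the block representing the
   inlined call site, when one exists.  */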
907 static void
908 copy_statement_list (tree *tp)
910 tree_stmt_iterator oi, ni;
911 tree new_tree;
913 new_tree = alloc_stmt_list ();
914 ni = tsi_start (new_tree);
915 oi = tsi_start (*tp);
916 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
917 *tp = new_tree;
919 for (; !tsi_end_p (oi); tsi_next (&oi))
921 tree stmt = tsi_stmt (oi);
922 if (TREE_CODE (stmt) == STATEMENT_LIST)
923 /* This copy is not redundant; tsi_link_after will smash this
924 STATEMENT_LIST into the end of the one we're building, and we
925 don't want to do that with the original. */
926 copy_statement_list (&stmt);
927 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
931 static void
932 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
934 tree block = BIND_EXPR_BLOCK (*tp);
935 /* Copy (and replace) the statement. */
936 copy_tree_r (tp, walk_subtrees, NULL);
937 if (block)
939 remap_block (&block, id);
940 BIND_EXPR_BLOCK (*tp) = block;
943 if (BIND_EXPR_VARS (*tp))
944 /* This will remap a lot of the same decls again, but this should be
945 harmless. */
946 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
950 /* Create a new gimple_seq by remapping all the statements in BODY
951 using the inlining information in ID. */
953 static gimple_seq
954 remap_gimple_seq (gimple_seq body, copy_body_data *id)
956 gimple_stmt_iterator si;
957 gimple_seq new_body = NULL;
959 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
961 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
962 gimple_seq_add_seq (&new_body, new_stmts);
965 return new_body;
969 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
970 block using the mapping information in ID. */
972 static gimple *
973 copy_gimple_bind (gbind *stmt, copy_body_data *id)
975 gimple *new_bind;
976 tree new_block, new_vars;
977 gimple_seq body, new_body;
979 /* Copy the statement. Note that we purposely don't use copy_stmt
980 here because we need to remap statements as we copy. */
981 body = gimple_bind_body (stmt);
982 new_body = remap_gimple_seq (body, id);
984 new_block = gimple_bind_block (stmt);
985 if (new_block)
986 remap_block (&new_block, id);
988 /* This will remap a lot of the same decls again, but this should be
989 harmless. */
990 new_vars = gimple_bind_vars (stmt);
991 if (new_vars)
992 new_vars = remap_decls (new_vars, NULL, id);
994 new_bind = gimple_build_bind (new_vars, new_body, new_block);
996 return new_bind;
999 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
1001 static bool
1002 is_parm (tree decl)
1004 if (TREE_CODE (decl) == SSA_NAME)
1006 decl = SSA_NAME_VAR (decl);
1007 if (!decl)
1008 return false;
1011 return (TREE_CODE (decl) == PARM_DECL);
1014 /* Remap the dependence CLIQUE from the source to the destination function
1015 as specified in ID. */
1017 static unsigned short
1018 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1020 if (clique == 0 || processing_debug_stmt)
1021 return 0;
1022 if (!id->dependence_map)
1023 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1024 bool existed;
1025 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1026 if (!existed)
1028 /* Clique 1 is reserved for local ones set by PTA. */
1029 if (cfun->last_clique == 0)
1030 cfun->last_clique = 1;
1031 newc = ++cfun->last_clique;
1033 return newc;
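/* Editorial sketch: dependence cliques (MR_DEPENDENCE_CLIQUE on MEM_REFs,
   typically derived from restrict pointers) are numbered per function, so a
   clique copied from the source function needs a fresh number in the
   destination.  Two MEM_REFs that shared, say, clique 2 in the callee end up
   sharing the same newly allocated clique in the caller after inlining;
   clique 0 ("no clique") is preserved, and nothing is allocated while
   processing debug stmts.  */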
1036 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1037 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1038 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
1039 recursing into the children nodes of *TP. */
1041 static tree
1042 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1044 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1045 copy_body_data *id = (copy_body_data *) wi_p->info;
1046 tree fn = id->src_fn;
1048 /* For recursive invocations this is no longer the LHS itself. */
1049 bool is_lhs = wi_p->is_lhs;
1050 wi_p->is_lhs = false;
1052 if (TREE_CODE (*tp) == SSA_NAME)
1054 *tp = remap_ssa_name (*tp, id);
1055 *walk_subtrees = 0;
1056 if (is_lhs)
1057 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1058 return NULL;
1060 else if (auto_var_in_fn_p (*tp, fn))
1062 /* Local variables and labels need to be replaced by equivalent
1063 variables. We don't want to copy static variables; there's
1064 only one of those, no matter how many times we inline the
1065 containing function. Similarly for globals from an outer
1066 function. */
1067 tree new_decl;
1069 /* Remap the declaration. */
1070 new_decl = remap_decl (*tp, id);
1071 gcc_assert (new_decl);
1072 /* Replace this variable with the copy. */
1073 STRIP_TYPE_NOPS (new_decl);
1074 /* ??? The C++ frontend uses void * pointer zero to initialize
1075 any other type. This confuses the middle-end type verification.
1076 As cloned bodies do not go through gimplification again the fixup
1077 there doesn't trigger. */
1078 if (TREE_CODE (new_decl) == INTEGER_CST
1079 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1080 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1081 *tp = new_decl;
1082 *walk_subtrees = 0;
1084 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1085 gcc_unreachable ();
1086 else if (TREE_CODE (*tp) == SAVE_EXPR)
1087 gcc_unreachable ();
1088 else if (TREE_CODE (*tp) == LABEL_DECL
1089 && (!DECL_CONTEXT (*tp)
1090 || decl_function_context (*tp) == id->src_fn))
1091 /* These may need to be remapped for EH handling. */
1092 *tp = remap_decl (*tp, id);
1093 else if (TREE_CODE (*tp) == FIELD_DECL)
1095 /* If the enclosing record type is variably_modified_type_p, the field
1096 has already been remapped. Otherwise, it need not be. */
1097 tree *n = id->decl_map->get (*tp);
1098 if (n)
1099 *tp = *n;
1100 *walk_subtrees = 0;
1102 else if (TYPE_P (*tp))
1103 /* Types may need remapping as well. */
1104 *tp = remap_type (*tp, id);
1105 else if (CONSTANT_CLASS_P (*tp))
1107 /* If this is a constant, we have to copy the node iff the type
1108 will be remapped. copy_tree_r will not copy a constant. */
1109 tree new_type = remap_type (TREE_TYPE (*tp), id);
1111 if (new_type == TREE_TYPE (*tp))
1112 *walk_subtrees = 0;
1114 else if (TREE_CODE (*tp) == INTEGER_CST)
1115 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1116 else
1118 *tp = copy_node (*tp);
1119 TREE_TYPE (*tp) = new_type;
1122 else
1124 /* Otherwise, just copy the node. Note that copy_tree_r already
1125 knows not to copy VAR_DECLs, etc., so this is safe. */
1127 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1129 /* We need to re-canonicalize MEM_REFs from inline substitutions
1130 that can happen when a pointer argument is an ADDR_EXPR.
1131 Recurse here manually to allow that. */
1132 tree ptr = TREE_OPERAND (*tp, 0);
1133 tree type = remap_type (TREE_TYPE (*tp), id);
1134 tree old = *tp;
1135 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1136 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1137 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1138 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1139 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1140 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1142 MR_DEPENDENCE_CLIQUE (*tp)
1143 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1144 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1146 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1147 remapped a parameter as the property might be valid only
1148 for the parameter itself. */
1149 if (TREE_THIS_NOTRAP (old)
1150 && (!is_parm (TREE_OPERAND (old, 0))
1151 || (!id->transform_parameter && is_parm (ptr))))
1152 TREE_THIS_NOTRAP (*tp) = 1;
1153 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1154 *walk_subtrees = 0;
1155 return NULL;
1158 /* Here is the "usual case". Copy this tree node, and then
1159 tweak some special cases. */
1160 copy_tree_r (tp, walk_subtrees, NULL);
1162 if (TREE_CODE (*tp) != OMP_CLAUSE)
1163 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1165 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1167 /* The copied TARGET_EXPR has never been expanded, even if the
1168 original node was expanded already. */
1169 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1170 TREE_OPERAND (*tp, 3) = NULL_TREE;
1172 else if (TREE_CODE (*tp) == ADDR_EXPR)
1174 /* Variable substitution need not be simple. Consider, in particular,
1175 the MEM_REF substitution above. Make sure that
1176 TREE_CONSTANT and friends are up-to-date. */
1177 int invariant = is_gimple_min_invariant (*tp);
1178 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1179 recompute_tree_invariant_for_addr_expr (*tp);
1181 /* If this used to be invariant, but is not any longer,
1182 then regimplification is probably needed. */
1183 if (invariant && !is_gimple_min_invariant (*tp))
1184 id->regimplify = true;
1186 *walk_subtrees = 0;
1190 /* Update the TREE_BLOCK for the cloned expr. */
1191 if (EXPR_P (*tp))
1193 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1194 tree old_block = TREE_BLOCK (*tp);
1195 if (old_block)
1197 tree *n;
1198 n = id->decl_map->get (TREE_BLOCK (*tp));
1199 if (n)
1200 new_block = *n;
1202 TREE_SET_BLOCK (*tp, new_block);
1205 /* Keep iterating. */
1206 return NULL_TREE;
1210 /* Called from copy_body_id via walk_tree. DATA is really a
1211 `copy_body_data *'. */
1213 tree
1214 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1216 copy_body_data *id = (copy_body_data *) data;
1217 tree fn = id->src_fn;
1218 tree new_block;
1220 /* Begin by recognizing trees that we'll completely rewrite for the
1221 inlining context. Our output for these trees is completely
1222 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1223 into an edge). Further down, we'll handle trees that get
1224 duplicated and/or tweaked. */
1226 /* When requested, RETURN_EXPRs should be transformed to just the
1227 contained MODIFY_EXPR. The branch semantics of the return will
1228 be handled elsewhere by manipulating the CFG rather than a statement. */
1229 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1231 tree assignment = TREE_OPERAND (*tp, 0);
1233 /* If we're returning something, just turn that into an
1234 assignment into the equivalent of the original RESULT_DECL.
1235 If the "assignment" is just the result decl, the result
1236 decl has already been set (e.g. a recent "foo (&result_decl,
1237 ...)"); just toss the entire RETURN_EXPR. */
1238 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1240 /* Replace the RETURN_EXPR with (a copy of) the
1241 MODIFY_EXPR hanging underneath. */
1242 *tp = copy_node (assignment);
1244 else /* Else the RETURN_EXPR returns no value. */
1246 *tp = NULL;
1247 return (tree) (void *)1;
1250 else if (TREE_CODE (*tp) == SSA_NAME)
1252 *tp = remap_ssa_name (*tp, id);
1253 *walk_subtrees = 0;
1254 return NULL;
1257 /* Local variables and labels need to be replaced by equivalent
1258 variables. We don't want to copy static variables; there's only
1259 one of those, no matter how many times we inline the containing
1260 function. Similarly for globals from an outer function. */
1261 else if (auto_var_in_fn_p (*tp, fn))
1263 tree new_decl;
1265 /* Remap the declaration. */
1266 new_decl = remap_decl (*tp, id);
1267 gcc_assert (new_decl);
1268 /* Replace this variable with the copy. */
1269 STRIP_TYPE_NOPS (new_decl);
1270 *tp = new_decl;
1271 *walk_subtrees = 0;
1273 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1274 copy_statement_list (tp);
1275 else if (TREE_CODE (*tp) == SAVE_EXPR
1276 || TREE_CODE (*tp) == TARGET_EXPR)
1277 remap_save_expr (tp, id->decl_map, walk_subtrees);
1278 else if (TREE_CODE (*tp) == LABEL_DECL
1279 && (! DECL_CONTEXT (*tp)
1280 || decl_function_context (*tp) == id->src_fn))
1281 /* These may need to be remapped for EH handling. */
1282 *tp = remap_decl (*tp, id);
1283 else if (TREE_CODE (*tp) == BIND_EXPR)
1284 copy_bind_expr (tp, walk_subtrees, id);
1285 /* Types may need remapping as well. */
1286 else if (TYPE_P (*tp))
1287 *tp = remap_type (*tp, id);
1289 /* If this is a constant, we have to copy the node iff the type will be
1290 remapped. copy_tree_r will not copy a constant. */
1291 else if (CONSTANT_CLASS_P (*tp))
1293 tree new_type = remap_type (TREE_TYPE (*tp), id);
1295 if (new_type == TREE_TYPE (*tp))
1296 *walk_subtrees = 0;
1298 else if (TREE_CODE (*tp) == INTEGER_CST)
1299 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1300 else
1302 *tp = copy_node (*tp);
1303 TREE_TYPE (*tp) = new_type;
1307 /* Otherwise, just copy the node. Note that copy_tree_r already
1308 knows not to copy VAR_DECLs, etc., so this is safe. */
1309 else
1311 /* Here we handle trees that are not completely rewritten.
1312 First we detect some inlining-induced bogosities for
1313 discarding. */
1314 if (TREE_CODE (*tp) == MODIFY_EXPR
1315 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1316 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1318 /* Some assignments VAR = VAR; don't generate any rtl code
1319 and thus don't count as variable modification. Avoid
1320 keeping bogosities like 0 = 0. */
1321 tree decl = TREE_OPERAND (*tp, 0), value;
1322 tree *n;
1324 n = id->decl_map->get (decl);
1325 if (n)
1327 value = *n;
1328 STRIP_TYPE_NOPS (value);
1329 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1331 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1332 return copy_tree_body_r (tp, walk_subtrees, data);
1336 else if (TREE_CODE (*tp) == INDIRECT_REF)
1338 /* Get rid of *& from inline substitutions that can happen when a
1339 pointer argument is an ADDR_EXPR. */
1340 tree decl = TREE_OPERAND (*tp, 0);
1341 tree *n = id->decl_map->get (decl);
1342 if (n)
1344 /* If we happen to get an ADDR_EXPR in n->value, strip
1345 it manually here as we'll eventually get ADDR_EXPRs
1346 which lie about their types pointed to. In this case
1347 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1348 but we absolutely rely on that. As fold_indirect_ref
1349 does other useful transformations, try that first, though. */
1350 tree type = TREE_TYPE (*tp);
1351 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1352 tree old = *tp;
1353 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1354 if (! *tp)
1356 type = remap_type (type, id);
1357 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1360 *tp = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1361 /* ??? We should either assert here or build
1362 a VIEW_CONVERT_EXPR instead of blindly leaking
1363 incompatible types to our IL. */
1364 if (! *tp)
1365 *tp = TREE_OPERAND (ptr, 0);
1367 else
1369 *tp = build1 (INDIRECT_REF, type, ptr);
1370 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1371 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1372 TREE_READONLY (*tp) = TREE_READONLY (old);
1373 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1374 have remapped a parameter as the property might be
1375 valid only for the parameter itself. */
1376 if (TREE_THIS_NOTRAP (old)
1377 && (!is_parm (TREE_OPERAND (old, 0))
1378 || (!id->transform_parameter && is_parm (ptr))))
1379 TREE_THIS_NOTRAP (*tp) = 1;
1382 *walk_subtrees = 0;
1383 return NULL;
1386 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1388 /* We need to re-canonicalize MEM_REFs from inline substitutions
1389 that can happen when a pointer argument is an ADDR_EXPR.
1390 Recurse here manually to allow that. */
1391 tree ptr = TREE_OPERAND (*tp, 0);
1392 tree type = remap_type (TREE_TYPE (*tp), id);
1393 tree old = *tp;
1394 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1395 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1396 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1397 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1398 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1399 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1401 MR_DEPENDENCE_CLIQUE (*tp)
1402 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1403 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1405 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1406 remapped a parameter as the property might be valid only
1407 for the parameter itself. */
1408 if (TREE_THIS_NOTRAP (old)
1409 && (!is_parm (TREE_OPERAND (old, 0))
1410 || (!id->transform_parameter && is_parm (ptr))))
1411 TREE_THIS_NOTRAP (*tp) = 1;
1412 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1413 *walk_subtrees = 0;
1414 return NULL;
1417 /* Here is the "usual case". Copy this tree node, and then
1418 tweak some special cases. */
1419 copy_tree_r (tp, walk_subtrees, NULL);
1421 /* If EXPR has a block defined, map it to the newly constructed block.
1422 When inlining we want EXPRs without a block to appear in the block
1423 of the function call if we are not remapping a type. */
1424 if (EXPR_P (*tp))
1426 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1427 if (TREE_BLOCK (*tp))
1429 tree *n;
1430 n = id->decl_map->get (TREE_BLOCK (*tp));
1431 if (n)
1432 new_block = *n;
1434 TREE_SET_BLOCK (*tp, new_block);
1437 if (TREE_CODE (*tp) != OMP_CLAUSE)
1438 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1440 /* The copied TARGET_EXPR has never been expanded, even if the
1441 original node was expanded already. */
1442 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1444 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1445 TREE_OPERAND (*tp, 3) = NULL_TREE;
1448 /* Variable substitution need not be simple. Consider, in particular, the
1449 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1450 and friends are up-to-date. */
1451 else if (TREE_CODE (*tp) == ADDR_EXPR)
1453 int invariant = is_gimple_min_invariant (*tp);
1454 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1456 /* Handle the case where we substituted an INDIRECT_REF
1457 into the operand of the ADDR_EXPR. */
1458 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1459 && !id->do_not_fold)
1461 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1462 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1463 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1464 *tp = t;
1466 else
1467 recompute_tree_invariant_for_addr_expr (*tp);
1469 /* If this used to be invariant, but is not any longer,
1470 then regimplification is probably needed. */
1471 if (invariant && !is_gimple_min_invariant (*tp))
1472 id->regimplify = true;
1474 *walk_subtrees = 0;
1478 /* Keep iterating. */
1479 return NULL_TREE;
1482 /* Helper for remap_gimple_stmt. Given an EH region number for the
1483 source function, map that to the duplicate EH region number in
1484 the destination function. */
1486 static int
1487 remap_eh_region_nr (int old_nr, copy_body_data *id)
1489 eh_region old_r, new_r;
1491 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1492 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1494 return new_r->index;
1497 /* Similar, but operate on INTEGER_CSTs. */
1499 static tree
1500 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1502 int old_nr, new_nr;
1504 old_nr = tree_to_shwi (old_t_nr);
1505 new_nr = remap_eh_region_nr (old_nr, id);
1507 return build_int_cst (integer_type_node, new_nr);
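/* Editorial note: remap_gimple_stmt below uses this for calls such as
   __builtin_eh_pointer (REGION_NR) and __builtin_eh_filter (REGION_NR),
   whose INTEGER_CST argument names an EH region of the source function and
   must be rewritten to the matching region of the destination:

     r = gimple_call_arg (copy, 0);
     r = remap_eh_region_tree_nr (r, id);
     gimple_call_set_arg (copy, 0, r);  */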
1510 /* Helper for copy_bb. Remap statement STMT using the inlining
1511 information in ID. Return the new statement copy. */
1513 static gimple_seq
1514 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1516 gimple *copy = NULL;
1517 struct walk_stmt_info wi;
1518 bool skip_first = false;
1519 gimple_seq stmts = NULL;
1521 if (is_gimple_debug (stmt)
1522 && (gimple_debug_nonbind_marker_p (stmt)
1523 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1524 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1525 return NULL;
1527 /* Begin by recognizing trees that we'll completely rewrite for the
1528 inlining context. Our output for these trees is completely
1529 different from our input (e.g. RETURN_EXPR is deleted and morphs
1530 into an edge). Further down, we'll handle trees that get
1531 duplicated and/or tweaked. */
1533 /* When requested, GIMPLE_RETURN should be transformed to just the
1534 contained GIMPLE_ASSIGN. The branch semantics of the return will
1535 be handled elsewhere by manipulating the CFG rather than the
1536 statement. */
1537 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1539 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1541 /* If we're returning something, just turn that into an
1542 assignment to the equivalent of the original RESULT_DECL.
1543 If RETVAL is just the result decl, the result decl has
1544 already been set (e.g. a recent "foo (&result_decl, ...)");
1545 just toss the entire GIMPLE_RETURN. */
1546 if (retval
1547 && (TREE_CODE (retval) != RESULT_DECL
1548 && (TREE_CODE (retval) != SSA_NAME
1549 || ! SSA_NAME_VAR (retval)
1550 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1552 copy = gimple_build_assign (id->do_not_unshare
1553 ? id->retvar : unshare_expr (id->retvar),
1554 retval);
1555 /* id->retvar is already substituted. Skip it on later remapping. */
1556 skip_first = true;
1558 else
1559 return NULL;
1561 else if (gimple_has_substatements (stmt))
1563 gimple_seq s1, s2;
1565 /* When cloning bodies from the C++ front end, we will be handed bodies
1566 in High GIMPLE form. Handle here all the High GIMPLE statements that
1567 have embedded statements. */
1568 switch (gimple_code (stmt))
1570 case GIMPLE_BIND:
1571 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1572 break;
1574 case GIMPLE_CATCH:
1576 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1577 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1578 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1580 break;
1582 case GIMPLE_EH_FILTER:
1583 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1584 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1585 break;
1587 case GIMPLE_TRY:
1588 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1589 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1590 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1591 break;
1593 case GIMPLE_WITH_CLEANUP_EXPR:
1594 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1595 copy = gimple_build_wce (s1);
1596 break;
1598 case GIMPLE_OMP_PARALLEL:
1600 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1601 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1602 copy = gimple_build_omp_parallel
1603 (s1,
1604 gimple_omp_parallel_clauses (omp_par_stmt),
1605 gimple_omp_parallel_child_fn (omp_par_stmt),
1606 gimple_omp_parallel_data_arg (omp_par_stmt));
1608 break;
1610 case GIMPLE_OMP_TASK:
1611 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1612 copy = gimple_build_omp_task
1613 (s1,
1614 gimple_omp_task_clauses (stmt),
1615 gimple_omp_task_child_fn (stmt),
1616 gimple_omp_task_data_arg (stmt),
1617 gimple_omp_task_copy_fn (stmt),
1618 gimple_omp_task_arg_size (stmt),
1619 gimple_omp_task_arg_align (stmt));
1620 break;
1622 case GIMPLE_OMP_FOR:
1623 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1624 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1625 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1626 gimple_omp_for_clauses (stmt),
1627 gimple_omp_for_collapse (stmt), s2);
1629 size_t i;
1630 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1632 gimple_omp_for_set_index (copy, i,
1633 gimple_omp_for_index (stmt, i));
1634 gimple_omp_for_set_initial (copy, i,
1635 gimple_omp_for_initial (stmt, i));
1636 gimple_omp_for_set_final (copy, i,
1637 gimple_omp_for_final (stmt, i));
1638 gimple_omp_for_set_incr (copy, i,
1639 gimple_omp_for_incr (stmt, i));
1640 gimple_omp_for_set_cond (copy, i,
1641 gimple_omp_for_cond (stmt, i));
1644 break;
1646 case GIMPLE_OMP_MASTER:
1647 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1648 copy = gimple_build_omp_master (s1);
1649 break;
1651 case GIMPLE_OMP_TASKGROUP:
1652 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1653 copy = gimple_build_omp_taskgroup
1654 (s1, gimple_omp_taskgroup_clauses (stmt));
1655 break;
1657 case GIMPLE_OMP_ORDERED:
1658 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1659 copy = gimple_build_omp_ordered
1660 (s1,
1661 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1662 break;
1664 case GIMPLE_OMP_SCAN:
1665 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1666 copy = gimple_build_omp_scan
1667 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1668 break;
1670 case GIMPLE_OMP_SECTION:
1671 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1672 copy = gimple_build_omp_section (s1);
1673 break;
1675 case GIMPLE_OMP_SECTIONS:
1676 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1677 copy = gimple_build_omp_sections
1678 (s1, gimple_omp_sections_clauses (stmt));
1679 break;
1681 case GIMPLE_OMP_SINGLE:
1682 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1683 copy = gimple_build_omp_single
1684 (s1, gimple_omp_single_clauses (stmt));
1685 break;
1687 case GIMPLE_OMP_TARGET:
1688 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1689 copy = gimple_build_omp_target
1690 (s1, gimple_omp_target_kind (stmt),
1691 gimple_omp_target_clauses (stmt));
1692 break;
1694 case GIMPLE_OMP_TEAMS:
1695 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1696 copy = gimple_build_omp_teams
1697 (s1, gimple_omp_teams_clauses (stmt));
1698 break;
1700 case GIMPLE_OMP_CRITICAL:
1701 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1702 copy = gimple_build_omp_critical (s1,
1703 gimple_omp_critical_name
1704 (as_a <gomp_critical *> (stmt)),
1705 gimple_omp_critical_clauses
1706 (as_a <gomp_critical *> (stmt)));
1707 break;
1709 case GIMPLE_TRANSACTION:
1711 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1712 gtransaction *new_trans_stmt;
1713 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1714 id);
1715 copy = new_trans_stmt = gimple_build_transaction (s1);
1716 gimple_transaction_set_subcode (new_trans_stmt,
1717 gimple_transaction_subcode (old_trans_stmt));
1718 gimple_transaction_set_label_norm (new_trans_stmt,
1719 gimple_transaction_label_norm (old_trans_stmt));
1720 gimple_transaction_set_label_uninst (new_trans_stmt,
1721 gimple_transaction_label_uninst (old_trans_stmt));
1722 gimple_transaction_set_label_over (new_trans_stmt,
1723 gimple_transaction_label_over (old_trans_stmt));
1725 break;
1727 default:
1728 gcc_unreachable ();
1731 else
1733 if (gimple_assign_copy_p (stmt)
1734 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1735 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1737 /* Here we handle statements that are not completely rewritten.
1738 First we detect some inlining-induced bogosities for
1739 discarding. */
1741 /* Some assignments VAR = VAR; don't generate any rtl code
1742 and thus don't count as variable modification. Avoid
1743 keeping bogosities like 0 = 0. */
1744 tree decl = gimple_assign_lhs (stmt), value;
1745 tree *n;
1747 n = id->decl_map->get (decl);
1748 if (n)
1750 value = *n;
1751 STRIP_TYPE_NOPS (value);
1752 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1753 return NULL;
1757 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1758 in a block that we aren't copying during tree_function_versioning,
1759 just drop the clobber stmt. */
1760 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1762 tree lhs = gimple_assign_lhs (stmt);
1763 if (TREE_CODE (lhs) == MEM_REF
1764 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1766 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1767 if (gimple_bb (def_stmt)
1768 && !bitmap_bit_p (id->blocks_to_copy,
1769 gimple_bb (def_stmt)->index))
1770 return NULL;
1774 /* We do not allow CLOBBERs of handled components. In case the
1775 returned value is stored via such a handled component, remove
1776 the clobber so the stmt verifier is happy. */
1777 if (gimple_clobber_p (stmt)
1778 && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1780 tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1781 if (!DECL_P (remapped)
1782 && TREE_CODE (remapped) != MEM_REF)
1783 return NULL;
1786 if (gimple_debug_bind_p (stmt))
1788 gdebug *copy
1789 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1790 gimple_debug_bind_get_value (stmt),
1791 stmt);
1792 if (id->reset_location)
1793 gimple_set_location (copy, input_location);
1794 id->debug_stmts.safe_push (copy);
1795 gimple_seq_add_stmt (&stmts, copy);
1796 return stmts;
1798 if (gimple_debug_source_bind_p (stmt))
1800 gdebug *copy = gimple_build_debug_source_bind
1801 (gimple_debug_source_bind_get_var (stmt),
1802 gimple_debug_source_bind_get_value (stmt),
1803 stmt);
1804 if (id->reset_location)
1805 gimple_set_location (copy, input_location);
1806 id->debug_stmts.safe_push (copy);
1807 gimple_seq_add_stmt (&stmts, copy);
1808 return stmts;
1810 if (gimple_debug_nonbind_marker_p (stmt))
1812 /* If the inlined function has too many debug markers,
1813 don't copy them. */
1814 if (id->src_cfun->debug_marker_count
1815 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1816 return stmts;
1818 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1819 if (id->reset_location)
1820 gimple_set_location (copy, input_location);
1821 id->debug_stmts.safe_push (copy);
1822 gimple_seq_add_stmt (&stmts, copy);
1823 return stmts;
1826 /* Create a new deep copy of the statement. */
1827 copy = gimple_copy (stmt);
1829 /* Clear flags that need revisiting. */
1830 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1832 if (gimple_call_tail_p (call_stmt))
1833 gimple_call_set_tail (call_stmt, false);
1834 if (gimple_call_from_thunk_p (call_stmt))
1835 gimple_call_set_from_thunk (call_stmt, false);
1836 if (gimple_call_internal_p (call_stmt))
1837 switch (gimple_call_internal_fn (call_stmt))
1839 case IFN_GOMP_SIMD_LANE:
1840 case IFN_GOMP_SIMD_VF:
1841 case IFN_GOMP_SIMD_LAST_LANE:
1842 case IFN_GOMP_SIMD_ORDERED_START:
1843 case IFN_GOMP_SIMD_ORDERED_END:
1844 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1845 break;
1846 default:
1847 break;
1851 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1852 RESX and EH_DISPATCH. */
1853 if (id->eh_map)
1854 switch (gimple_code (copy))
1856 case GIMPLE_CALL:
1858 tree r, fndecl = gimple_call_fndecl (copy);
1859 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1860 switch (DECL_FUNCTION_CODE (fndecl))
1862 case BUILT_IN_EH_COPY_VALUES:
1863 r = gimple_call_arg (copy, 1);
1864 r = remap_eh_region_tree_nr (r, id);
1865 gimple_call_set_arg (copy, 1, r);
1866 /* FALLTHRU */
1868 case BUILT_IN_EH_POINTER:
1869 case BUILT_IN_EH_FILTER:
1870 r = gimple_call_arg (copy, 0);
1871 r = remap_eh_region_tree_nr (r, id);
1872 gimple_call_set_arg (copy, 0, r);
1873 break;
1875 default:
1876 break;
1879 /* Reset alias info if we didn't apply measures to
1880 keep it valid over inlining by setting DECL_PT_UID. */
1881 if (!id->src_cfun->gimple_df
1882 || !id->src_cfun->gimple_df->ipa_pta)
1883 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1885 break;
1887 case GIMPLE_RESX:
1889 gresx *resx_stmt = as_a <gresx *> (copy);
1890 int r = gimple_resx_region (resx_stmt);
1891 r = remap_eh_region_nr (r, id);
1892 gimple_resx_set_region (resx_stmt, r);
1894 break;
1896 case GIMPLE_EH_DISPATCH:
1898 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1899 int r = gimple_eh_dispatch_region (eh_dispatch);
1900 r = remap_eh_region_nr (r, id);
1901 gimple_eh_dispatch_set_region (eh_dispatch, r);
1903 break;
1905 default:
1906 break;
1910 /* If STMT has a block defined, map it to the newly constructed block. */
1911 if (tree block = gimple_block (copy))
1913 tree *n;
1914 n = id->decl_map->get (block);
1915 gcc_assert (n);
1916 gimple_set_block (copy, *n);
1918 if (id->param_body_adjs)
1920 gimple_seq extra_stmts = NULL;
1921 id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts);
1922 if (!gimple_seq_empty_p (extra_stmts))
1924 memset (&wi, 0, sizeof (wi));
1925 wi.info = id;
1926 for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1927 !gsi_end_p (egsi);
1928 gsi_next (&egsi))
1929 walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1930 gimple_seq_add_seq (&stmts, extra_stmts);
1934 if (id->reset_location)
1935 gimple_set_location (copy, input_location);
1937 /* Debug statements ought to be rebuilt and not copied. */
1938 gcc_checking_assert (!is_gimple_debug (copy));
1940 /* Remap all the operands in COPY. */
1941 memset (&wi, 0, sizeof (wi));
1942 wi.info = id;
1943 if (skip_first)
1944 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1945 else
1946 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1948 /* Clear the copied virtual operands. We are not remapping them here
1949 but are going to recreate them from scratch. */
1950 if (gimple_has_mem_ops (copy))
1952 gimple_set_vdef (copy, NULL_TREE);
1953 gimple_set_vuse (copy, NULL_TREE);
1956 gimple_seq_add_stmt (&stmts, copy);
1957 return stmts;
1961 /* Copy a basic block, scaling its profile accordingly. Edges will be
1962 taken care of later. */
1964 static basic_block
1965 copy_bb (copy_body_data *id, basic_block bb,
1966 profile_count num, profile_count den)
1968 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1969 basic_block copy_basic_block;
1970 tree decl;
1971 basic_block prev;
1973 profile_count::adjust_for_ipa_scaling (&num, &den);
1975 /* Search for the previous copied basic block. */
1976 prev = bb->prev_bb;
1977 while (!prev->aux)
1978 prev = prev->prev_bb;
1980 /* create_basic_block() will append every new block to
1981 basic_block_info automatically. */
1982 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
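/* NUM/DEN is the profile scale computed by the caller (see copy_cfg_body):
   NUM is the count of the block standing in for the source function's entry
   and DEN the source function's entry count, so each copied block keeps its
   relative frequency.  */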
1983 copy_basic_block->count = bb->count.apply_scale (num, den);
1985 copy_gsi = gsi_start_bb (copy_basic_block);
1987 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1989 gimple_seq stmts;
1990 gimple *stmt = gsi_stmt (gsi);
1991 gimple *orig_stmt = stmt;
1992 gimple_stmt_iterator stmts_gsi;
1993 bool stmt_added = false;
1995 id->regimplify = false;
1996 stmts = remap_gimple_stmt (stmt, id);
1998 if (gimple_seq_empty_p (stmts))
1999 continue;
2001 seq_gsi = copy_gsi;
2003 for (stmts_gsi = gsi_start (stmts);
2004 !gsi_end_p (stmts_gsi); )
2006 stmt = gsi_stmt (stmts_gsi);
2008 /* Advance iterator now before stmt is moved to seq_gsi. */
2009 gsi_next (&stmts_gsi);
2011 if (gimple_nop_p (stmt))
2012 continue;
2014 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2015 orig_stmt);
2017 /* With return slot optimization we can end up with
2018 non-gimple (foo *)&this->m; fix that here. */
2019 if (is_gimple_assign (stmt)
2020 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
2021 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2023 tree new_rhs;
2024 new_rhs = force_gimple_operand_gsi (&seq_gsi,
2025 gimple_assign_rhs1 (stmt),
2026 true, NULL, false,
2027 GSI_CONTINUE_LINKING);
2028 gimple_assign_set_rhs1 (stmt, new_rhs);
2029 id->regimplify = false;
2032 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2034 if (id->regimplify)
2035 gimple_regimplify_operands (stmt, &seq_gsi);
2037 stmt_added = true;
2040 if (!stmt_added)
2041 continue;
2043 /* If copy_basic_block was empty at the start of this iteration,
2044 call gsi_start_bb again to get at the newly added statements. */
2045 if (gsi_end_p (copy_gsi))
2046 copy_gsi = gsi_start_bb (copy_basic_block);
2047 else
2048 gsi_next (&copy_gsi);
2050 /* Process the new statement. The call to gimple_regimplify_operands
2051 possibly turned the statement into multiple statements; we
2052 need to process all of them. */
2055 tree fn;
2056 gcall *call_stmt;
2058 stmt = gsi_stmt (copy_gsi);
2059 call_stmt = dyn_cast <gcall *> (stmt);
2060 if (call_stmt
2061 && gimple_call_va_arg_pack_p (call_stmt)
2062 && id->call_stmt
2063 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2065 /* __builtin_va_arg_pack () should be replaced by
2066 all arguments corresponding to ... in the caller. */
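/* A sketch with a hypothetical wrapper: inlining
     static inline int warnf (const char *f, ...)
     { return fprintf (stderr, f, __builtin_va_arg_pack ()); }
   at a call warnf (fmt, i, j) rebuilds the inner call so that the caller's
   anonymous arguments i and j replace the pack, giving
   fprintf (stderr, f, i, j).  */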
2067 tree p;
2068 gcall *new_call;
2069 vec<tree> argarray;
2070 size_t nargs = gimple_call_num_args (id->call_stmt);
2071 size_t n;
2073 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2074 nargs--;
2076 /* Create the new array of arguments. */
2077 n = nargs + gimple_call_num_args (call_stmt);
2078 argarray.create (n);
2079 argarray.safe_grow_cleared (n);
2081 /* Copy all the arguments before '...' */
2082 memcpy (argarray.address (),
2083 gimple_call_arg_ptr (call_stmt, 0),
2084 gimple_call_num_args (call_stmt) * sizeof (tree));
2086 /* Append the arguments passed in '...' */
2087 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2088 gimple_call_arg_ptr (id->call_stmt, 0)
2089 + (gimple_call_num_args (id->call_stmt) - nargs),
2090 nargs * sizeof (tree));
2092 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2093 argarray);
2095 argarray.release ();
2097 /* Copy all GIMPLE_CALL flags, location and block, except
2098 GF_CALL_VA_ARG_PACK. */
2099 gimple_call_copy_flags (new_call, call_stmt);
2100 gimple_call_set_va_arg_pack (new_call, false);
2101 /* location includes block. */
2102 gimple_set_location (new_call, gimple_location (stmt));
2103 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2105 gsi_replace (&copy_gsi, new_call, false);
2106 stmt = new_call;
2108 else if (call_stmt
2109 && id->call_stmt
2110 && (decl = gimple_call_fndecl (stmt))
2111 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2113 /* __builtin_va_arg_pack_len () should be replaced by
2114 the number of anonymous arguments. */
2115 size_t nargs = gimple_call_num_args (id->call_stmt);
2116 tree count, p;
2117 gimple *new_stmt;
2119 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2120 nargs--;
2122 if (!gimple_call_lhs (stmt))
2124 /* Drop unused calls. */
2125 gsi_remove (&copy_gsi, false);
2126 continue;
2128 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2130 count = build_int_cst (integer_type_node, nargs);
2131 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2132 gsi_replace (&copy_gsi, new_stmt, false);
2133 stmt = new_stmt;
2135 else if (nargs != 0)
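/* Here the call we are inlining into is itself marked va_arg_pack (nested
   always_inline wrappers), so the final argument count is not known yet:
   keep the __builtin_va_arg_pack_len () call and add the NARGS arguments
   already known at this point to its result.  */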
2137 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2138 count = build_int_cst (integer_type_node, nargs);
2139 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2140 PLUS_EXPR, newlhs, count);
2141 gimple_call_set_lhs (stmt, newlhs);
2142 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2145 else if (call_stmt
2146 && id->call_stmt
2147 && gimple_call_internal_p (stmt)
2148 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2150 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2151 gsi_remove (&copy_gsi, false);
2152 continue;
2155 /* Statements produced by inlining can be unfolded, especially
2156 when we constant propagated some operands. We can't fold
2157 them right now for two reasons:
2158 1) folding requires SSA_NAME_DEF_STMTs to be correct
2159 2) we can't change function calls to builtins.
2160 So we just mark the statement for later folding. We mark
2161 all new statements, instead of just the statements that have
2162 changed by some nontrivial substitution, so even statements
2163 made foldable indirectly are updated. If this turns out to be
2164 expensive, copy_body can be told to watch for nontrivial
2165 changes. */
2166 if (id->statements_to_fold)
2167 id->statements_to_fold->add (stmt);
2169 /* We're duplicating a CALL_EXPR. Find any corresponding
2170 callgraph edges and update or duplicate them. */
2171 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2173 struct cgraph_edge *edge;
2175 switch (id->transform_call_graph_edges)
2177 case CB_CGE_DUPLICATE:
2178 edge = id->src_node->get_edge (orig_stmt);
2179 if (edge)
2181 struct cgraph_edge *old_edge = edge;
2182 profile_count old_cnt = edge->count;
2183 edge = edge->clone (id->dst_node, call_stmt,
2184 gimple_uid (stmt),
2185 num, den,
2186 true);
2188 /* Speculative calls consist of two edges - direct and
2189 indirect. Duplicate the whole thing and distribute
2190 frequencies accordingly. */
2191 if (edge->speculative)
2193 struct cgraph_edge *direct, *indirect;
2194 struct ipa_ref *ref;
2196 gcc_assert (!edge->indirect_unknown_callee);
2197 old_edge->speculative_call_info (direct, indirect, ref);
2199 profile_count indir_cnt = indirect->count;
2200 indirect = indirect->clone (id->dst_node, call_stmt,
2201 gimple_uid (stmt),
2202 num, den,
2203 true);
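/* Give the cloned indirect edge the share of the copied block's count that
   the original indirect edge had of the combined direct + indirect count;
   the direct edge gets the remainder.  */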
2205 profile_probability prob
2206 = indir_cnt.probability_in (old_cnt + indir_cnt);
2207 indirect->count
2208 = copy_basic_block->count.apply_probability (prob);
2209 edge->count = copy_basic_block->count - indirect->count;
2210 id->dst_node->clone_reference (ref, stmt);
2212 else
2213 edge->count = copy_basic_block->count;
2215 break;
2217 case CB_CGE_MOVE_CLONES:
2218 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2219 call_stmt);
2220 edge = id->dst_node->get_edge (stmt);
2221 break;
2223 case CB_CGE_MOVE:
2224 edge = id->dst_node->get_edge (orig_stmt);
2225 if (edge)
2226 edge->set_call_stmt (call_stmt);
2227 break;
2229 default:
2230 gcc_unreachable ();
2233 /* Constant propagation on arguments done during inlining
2234 may create a new direct call. Produce an edge for it. */
2235 if ((!edge
2236 || (edge->indirect_inlining_edge
2237 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2238 && id->dst_node->definition
2239 && (fn = gimple_call_fndecl (stmt)) != NULL)
2241 struct cgraph_node *dest = cgraph_node::get_create (fn);
2243 /* We have a missing edge in the callgraph. This can happen
2244 when previous inlining turned an indirect call into a
2245 direct call by constant propagating arguments or we are
2246 producing a dead clone (for further cloning). In all
2247 other cases we hit a bug (incorrect node sharing is the
2248 most common reason for missing edges). */
2249 gcc_assert (!dest->definition
2250 || dest->address_taken
2251 || !id->src_node->definition
2252 || !id->dst_node->definition);
2253 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2254 id->dst_node->create_edge_including_clones
2255 (dest, orig_stmt, call_stmt, bb->count,
2256 CIF_ORIGINALLY_INDIRECT_CALL);
2257 else
2258 id->dst_node->create_edge (dest, call_stmt,
2259 bb->count)->inline_failed
2260 = CIF_ORIGINALLY_INDIRECT_CALL;
2261 if (dump_file)
2263 fprintf (dump_file, "Created new direct edge to %s\n",
2264 dest->name ());
2268 notice_special_calls (as_a <gcall *> (stmt));
2271 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2272 id->eh_map, id->eh_lp_nr);
2274 gsi_next (&copy_gsi);
2276 while (!gsi_end_p (copy_gsi));
2278 copy_gsi = gsi_last_bb (copy_basic_block);
2281 return copy_basic_block;
2284 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2285 form is quite easy, since the dominator relationship for the old basic blocks
2286 does not change.
2288 There is however an exception where inlining might change the dominator
2289 relation across EH edges from basic blocks within inlined functions destined
2290 for landing pads in the function we inline into.
2292 The function fills in PHI_RESULTs of such PHI nodes if they refer
2293 to gimple regs. Otherwise, the function marks PHI_RESULTs of such
2294 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2295 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2296 set, and this means that there will be no overlapping live ranges
2297 for the underlying symbol.
2299 This might change in the future if we allow redirecting of EH edges and
2300 we might then want to change the way we build the CFG pre-inlining to
2301 include all the possible edges. */
2302 static void
2303 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2304 bool can_throw, bool nonlocal_goto)
2306 edge e;
2307 edge_iterator ei;
2309 FOR_EACH_EDGE (e, ei, bb->succs)
2310 if (!e->dest->aux
2311 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2313 gphi *phi;
2314 gphi_iterator si;
2316 if (!nonlocal_goto)
2317 gcc_assert (e->flags & EDGE_EH);
2319 if (!can_throw)
2320 gcc_assert (!(e->flags & EDGE_EH));
2322 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2324 edge re;
2326 phi = si.phi ();
2328 /* For abnormal goto/call edges the receiver can be the
2329 ENTRY_BLOCK. Do not assert this cannot happen. */
2331 gcc_assert ((e->flags & EDGE_EH)
2332 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2334 re = find_edge (ret_bb, e->dest);
2335 gcc_checking_assert (re);
2336 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2337 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
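/* Reuse on the new abnormal edge E the PHI argument that already flows in
   from RET_BB.  */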
2339 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2340 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2345 /* Insert clobbers for automatic variables of the inlined ID->src_fn
2346 function at the start of basic block ID->eh_landing_pad_dest. */
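/* Only variables of the inlined function that are live in the source block
   of some incoming EH edge get a clobber; the clobber marks their storage
   as dead from the landing pad onwards (e.g. for later stack slot
   sharing).  */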
2348 static void
2349 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2351 tree var;
2352 basic_block bb = id->eh_landing_pad_dest;
2353 live_vars_map *vars = NULL;
2354 unsigned int cnt = 0;
2355 unsigned int i;
2356 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2357 if (VAR_P (var)
2358 && !DECL_HARD_REGISTER (var)
2359 && !TREE_THIS_VOLATILE (var)
2360 && !DECL_HAS_VALUE_EXPR_P (var)
2361 && !is_gimple_reg (var)
2362 && auto_var_in_fn_p (var, id->src_fn)
2363 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2365 tree *t = id->decl_map->get (var);
2366 if (!t)
2367 continue;
2368 tree new_var = *t;
2369 if (VAR_P (new_var)
2370 && !DECL_HARD_REGISTER (new_var)
2371 && !TREE_THIS_VOLATILE (new_var)
2372 && !DECL_HAS_VALUE_EXPR_P (new_var)
2373 && !is_gimple_reg (new_var)
2374 && auto_var_in_fn_p (new_var, id->dst_fn))
2376 if (vars == NULL)
2377 vars = new live_vars_map;
2378 vars->put (DECL_UID (var), cnt++);
2381 if (vars == NULL)
2382 return;
2384 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2385 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2386 if (VAR_P (var))
2388 edge e;
2389 edge_iterator ei;
2390 bool needed = false;
2391 unsigned int *v = vars->get (DECL_UID (var));
2392 if (v == NULL)
2393 continue;
2394 FOR_EACH_EDGE (e, ei, bb->preds)
2395 if ((e->flags & EDGE_EH) != 0
2396 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2398 basic_block src_bb = (basic_block) e->src->aux;
2400 if (bitmap_bit_p (&live[src_bb->index], *v))
2402 needed = true;
2403 break;
2406 if (needed)
2408 tree new_var = *id->decl_map->get (var);
2409 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2410 tree clobber = build_clobber (TREE_TYPE (new_var));
2411 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2412 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2415 destroy_live_vars (live);
2416 delete vars;
2419 /* Copy edges from BB into its copy constructed earlier, scaling the profile
2420 accordingly. Edges will be taken care of later. Assume aux
2421 pointers point to the copies of each BB. Return true if any
2422 debug stmts are left after a statement that must end the basic block. */
2424 static bool
2425 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2426 basic_block ret_bb, basic_block abnormal_goto_dest,
2427 copy_body_data *id)
2429 basic_block new_bb = (basic_block) bb->aux;
2430 edge_iterator ei;
2431 edge old_edge;
2432 gimple_stmt_iterator si;
2433 bool need_debug_cleanup = false;
2435 /* Use the indices from the original blocks to create edges for the
2436 new ones. */
2437 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2438 if (!(old_edge->flags & EDGE_EH))
2440 edge new_edge;
2441 int flags = old_edge->flags;
2442 location_t locus = old_edge->goto_locus;
2444 /* Return edges do get a FALLTHRU flag when they get inlined. */
2445 if (old_edge->dest->index == EXIT_BLOCK
2446 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2447 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2448 flags |= EDGE_FALLTHRU;
2450 new_edge
2451 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2452 new_edge->probability = old_edge->probability;
2453 if (!id->reset_location)
2454 new_edge->goto_locus = remap_location (locus, id);
2457 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2458 return false;
2460 /* When doing function splitting, we must decrease the count of the return
2461 block which was previously reachable from blocks we did not copy. */
2462 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2463 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2464 if (old_edge->src->index != ENTRY_BLOCK
2465 && !old_edge->src->aux)
2466 new_bb->count -= old_edge->count ().apply_scale (num, den);
2468 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2470 gimple *copy_stmt;
2471 bool can_throw, nonlocal_goto;
2473 copy_stmt = gsi_stmt (si);
2474 if (!is_gimple_debug (copy_stmt))
2475 update_stmt (copy_stmt);
2477 /* Do this before the possible split_block. */
2478 gsi_next (&si);
2480 /* If this tree could throw an exception, there are two
2481 cases where we need to add abnormal edge(s): the
2482 tree wasn't in a region and there is a "current
2483 region" in the caller; or the original tree had
2484 EH edges. In both cases split the block after the tree,
2485 and add abnormal edge(s) as needed; we need both
2486 those from the callee and the caller.
2487 We check whether the copy can throw, because the const
2488 propagation can change an INDIRECT_REF which throws
2489 into a COMPONENT_REF which doesn't. If the copy
2490 can throw, the original could also throw. */
2491 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2492 nonlocal_goto
2493 = (stmt_can_make_abnormal_goto (copy_stmt)
2494 && !computed_goto_p (copy_stmt));
2496 if (can_throw || nonlocal_goto)
2498 if (!gsi_end_p (si))
2500 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2501 gsi_next (&si);
2502 if (gsi_end_p (si))
2503 need_debug_cleanup = true;
2505 if (!gsi_end_p (si))
2506 /* Note that bb's predecessor edges aren't necessarily
2507 right at this point; split_block doesn't care. */
2509 edge e = split_block (new_bb, copy_stmt);
2511 new_bb = e->dest;
2512 new_bb->aux = e->src->aux;
2513 si = gsi_start_bb (new_bb);
2517 bool update_probs = false;
2519 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2521 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2522 update_probs = true;
2524 else if (can_throw)
2526 make_eh_edges (copy_stmt);
2527 update_probs = true;
2530 /* EH edges may not match old edges. Copy as much as possible. */
2531 if (update_probs)
2533 edge e;
2534 edge_iterator ei;
2535 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2537 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2538 if ((old_edge->flags & EDGE_EH)
2539 && (e = find_edge (copy_stmt_bb,
2540 (basic_block) old_edge->dest->aux))
2541 && (e->flags & EDGE_EH))
2542 e->probability = old_edge->probability;
2544 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2545 if (e->flags & EDGE_EH)
2547 if (!e->probability.initialized_p ())
2548 e->probability = profile_probability::never ();
2549 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2551 if (id->eh_landing_pad_dest == NULL)
2552 id->eh_landing_pad_dest = e->dest;
2553 else
2554 gcc_assert (id->eh_landing_pad_dest == e->dest);
2560 /* If the call we inline cannot make abnormal goto do not add
2561 additional abnormal edges but only retain those already present
2562 in the original function body. */
2563 if (abnormal_goto_dest == NULL)
2564 nonlocal_goto = false;
2565 if (nonlocal_goto)
2567 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2569 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2570 nonlocal_goto = false;
2571 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2572 in OpenMP regions which aren't allowed to be left abnormally.
2573 So, there is no need to add an abnormal edge in that case. */
2574 else if (is_gimple_call (copy_stmt)
2575 && gimple_call_internal_p (copy_stmt)
2576 && (gimple_call_internal_fn (copy_stmt)
2577 == IFN_ABNORMAL_DISPATCHER)
2578 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2579 nonlocal_goto = false;
2580 else
2581 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2582 EDGE_ABNORMAL);
2585 if ((can_throw || nonlocal_goto)
2586 && gimple_in_ssa_p (cfun))
2587 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2588 can_throw, nonlocal_goto);
2590 return need_debug_cleanup;
2593 /* Copy the PHIs. All blocks and edges are copied, some blocks
2594 were possibly split and new outgoing EH edges inserted.
2595 BB points to the block of the original function and AUX pointers link
2596 the original and newly copied blocks. */
2598 static void
2599 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2601 basic_block const new_bb = (basic_block) bb->aux;
2602 edge_iterator ei;
2603 gphi *phi;
2604 gphi_iterator si;
2605 edge new_edge;
2606 bool inserted = false;
2608 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2610 tree res, new_res;
2611 gphi *new_phi;
2613 phi = si.phi ();
2614 res = PHI_RESULT (phi);
2615 new_res = res;
2616 if (!virtual_operand_p (res))
2618 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2619 if (EDGE_COUNT (new_bb->preds) == 0)
2621 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2622 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2624 else
2626 new_phi = create_phi_node (new_res, new_bb);
2627 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2629 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2630 bb);
2631 tree arg;
2632 tree new_arg;
2633 edge_iterator ei2;
2634 location_t locus;
2636 /* When doing partial cloning, we allow PHIs on the entry
2637 block as long as all the arguments are the same.
2638 Find any input edge to see the argument to copy. */
2639 if (!old_edge)
2640 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2641 if (!old_edge->src->aux)
2642 break;
2644 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2645 new_arg = arg;
2646 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2647 gcc_assert (new_arg);
2648 /* With return slot optimization we can end up with
2649 non-gimple (foo *)&this->m; fix that here. */
2650 if (TREE_CODE (new_arg) != SSA_NAME
2651 && TREE_CODE (new_arg) != FUNCTION_DECL
2652 && !is_gimple_val (new_arg))
2654 gimple_seq stmts = NULL;
2655 new_arg = force_gimple_operand (new_arg, &stmts, true,
2656 NULL);
2657 gsi_insert_seq_on_edge (new_edge, stmts);
2658 inserted = true;
2660 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2661 if (id->reset_location)
2662 locus = input_location;
2663 else
2664 locus = remap_location (locus, id);
2665 add_phi_arg (new_phi, new_arg, new_edge, locus);
2671 /* Commit the delayed edge insertions. */
2672 if (inserted)
2673 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2674 gsi_commit_one_edge_insert (new_edge, NULL);
2678 /* Wrapper for remap_decl so it can be used as a callback. */
2680 static tree
2681 remap_decl_1 (tree decl, void *data)
2683 return remap_decl (decl, (copy_body_data *) data);
2686 /* Build the struct function and associated data structures for the new clone
2687 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2688 cfun to the function of NEW_FNDECL (and current_function_decl too). */
2690 static void
2691 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2693 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2695 if (!DECL_ARGUMENTS (new_fndecl))
2696 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2697 if (!DECL_RESULT (new_fndecl))
2698 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2700 /* Register specific tree functions. */
2701 gimple_register_cfg_hooks ();
2703 /* Get clean struct function. */
2704 push_struct_function (new_fndecl);
2706 /* We will rebuild these, so just sanity check that they are empty. */
2707 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2708 gcc_assert (cfun->local_decls == NULL);
2709 gcc_assert (cfun->cfg == NULL);
2710 gcc_assert (cfun->decl == new_fndecl);
2712 /* Copy items we preserve during cloning. */
2713 cfun->static_chain_decl = src_cfun->static_chain_decl;
2714 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2715 cfun->function_end_locus = src_cfun->function_end_locus;
2716 cfun->curr_properties = src_cfun->curr_properties;
2717 cfun->last_verified = src_cfun->last_verified;
2718 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2719 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2720 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2721 cfun->calls_eh_return = src_cfun->calls_eh_return;
2722 cfun->stdarg = src_cfun->stdarg;
2723 cfun->after_inlining = src_cfun->after_inlining;
2724 cfun->can_throw_non_call_exceptions
2725 = src_cfun->can_throw_non_call_exceptions;
2726 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2727 cfun->returns_struct = src_cfun->returns_struct;
2728 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2730 init_empty_tree_cfg ();
2732 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2734 profile_count num = count;
2735 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2736 profile_count::adjust_for_ipa_scaling (&num, &den);
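/* Scale the entry and exit block counts of the new function from the source
   function's profile to the given COUNT.  */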
2738 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2739 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2740 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2741 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2742 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2743 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2744 if (src_cfun->eh)
2745 init_eh_for_function ();
2747 if (src_cfun->gimple_df)
2749 init_tree_ssa (cfun);
2750 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2751 if (cfun->gimple_df->in_ssa_p)
2752 init_ssa_operands (cfun);
2756 /* Helper function for copy_cfg_body. Move debug stmts from the end
2757 of NEW_BB to the beginning of successor basic blocks when needed. If the
2758 successor has multiple predecessors, reset them, otherwise keep
2759 their value. */
2761 static void
2762 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2764 edge e;
2765 edge_iterator ei;
2766 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
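/* Nothing to do unless the last non-debug stmt can throw internally or make
   an abnormal goto, and debug stmts actually follow it.  */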
2768 if (gsi_end_p (si)
2769 || gsi_one_before_end_p (si)
2770 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2771 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2772 return;
2774 FOR_EACH_EDGE (e, ei, new_bb->succs)
2776 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2777 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2778 while (is_gimple_debug (gsi_stmt (ssi)))
2780 gimple *stmt = gsi_stmt (ssi);
2781 gdebug *new_stmt;
2782 tree var;
2783 tree value;
2785 /* For the last edge move the debug stmts instead of copying
2786 them. */
2787 if (ei_one_before_end_p (ei))
2789 si = ssi;
2790 gsi_prev (&ssi);
2791 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2793 gimple_debug_bind_reset_value (stmt);
2794 gimple_set_location (stmt, UNKNOWN_LOCATION);
2796 gsi_remove (&si, false);
2797 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2798 continue;
2801 if (gimple_debug_bind_p (stmt))
2803 var = gimple_debug_bind_get_var (stmt);
2804 if (single_pred_p (e->dest))
2806 value = gimple_debug_bind_get_value (stmt);
2807 value = unshare_expr (value);
2808 new_stmt = gimple_build_debug_bind (var, value, stmt);
2810 else
2811 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2813 else if (gimple_debug_source_bind_p (stmt))
2815 var = gimple_debug_source_bind_get_var (stmt);
2816 value = gimple_debug_source_bind_get_value (stmt);
2817 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2819 else if (gimple_debug_nonbind_marker_p (stmt))
2820 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2821 else
2822 gcc_unreachable ();
2823 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2824 id->debug_stmts.safe_push (new_stmt);
2825 gsi_prev (&ssi);
2830 /* Make a copy of the sub-loops of SRC_PARENT and place them
2831 as children of DEST_PARENT. */
2833 static void
2834 copy_loops (copy_body_data *id,
2835 class loop *dest_parent, class loop *src_parent)
2837 class loop *src_loop = src_parent->inner;
2838 while (src_loop)
2840 if (!id->blocks_to_copy
2841 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2843 class loop *dest_loop = alloc_loop ();
2845 /* Assign the new loop its header and latch and associate
2846 those with the new loop. */
2847 dest_loop->header = (basic_block)src_loop->header->aux;
2848 dest_loop->header->loop_father = dest_loop;
2849 if (src_loop->latch != NULL)
2851 dest_loop->latch = (basic_block)src_loop->latch->aux;
2852 dest_loop->latch->loop_father = dest_loop;
2855 /* Copy loop meta-data. */
2856 copy_loop_info (src_loop, dest_loop);
2857 if (dest_loop->unroll)
2858 cfun->has_unroll = true;
2859 if (dest_loop->force_vectorize)
2860 cfun->has_force_vectorize_loops = true;
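/* Remap the dependence clique owned by the source loop (clique 1 if it owns
   none) into the destination function.  */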
2861 if (id->src_cfun->last_clique != 0)
2862 dest_loop->owned_clique
2863 = remap_dependence_clique (id,
2864 src_loop->owned_clique
2865 ? src_loop->owned_clique : 1);
2867 /* Finally place it into the loop array and the loop tree. */
2868 place_new_loop (cfun, dest_loop);
2869 flow_loop_tree_node_add (dest_parent, dest_loop);
2871 if (src_loop->simduid)
2873 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2874 cfun->has_simduid_loops = true;
2877 /* Recurse. */
2878 copy_loops (id, dest_loop, src_loop);
2880 src_loop = src_loop->next;
2884 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2886 void
2887 redirect_all_calls (copy_body_data * id, basic_block bb)
2889 gimple_stmt_iterator si;
2890 gimple *last = last_stmt (bb);
2891 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2893 gimple *stmt = gsi_stmt (si);
2894 if (is_gimple_call (stmt))
2896 tree old_lhs = gimple_call_lhs (stmt);
2897 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2898 if (edge)
2900 gimple *new_stmt = edge->redirect_call_stmt_to_callee ();
2901 /* If the IPA-SRA transformation, run as part of edge redirection,
2902 removed the LHS because it is unused, save it to
2903 killed_new_ssa_names so that we can prune it from debug
2904 statements. */
2905 if (old_lhs
2906 && TREE_CODE (old_lhs) == SSA_NAME
2907 && !gimple_call_lhs (new_stmt))
2909 if (!id->killed_new_ssa_names)
2910 id->killed_new_ssa_names = new hash_set<tree> (16);
2911 id->killed_new_ssa_names->add (old_lhs);
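/* When inlining, if the (possibly redirected) call ending BB can no longer
   throw, purge its now-dead EH edges.  */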
2914 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2915 gimple_purge_dead_eh_edges (bb);
2921 /* Make a copy of the body of FN so that it can be inserted inline in
2922 another function. Walks FN via CFG, returns new fndecl. */
2924 static tree
2925 copy_cfg_body (copy_body_data * id,
2926 basic_block entry_block_map, basic_block exit_block_map,
2927 basic_block new_entry)
2929 tree callee_fndecl = id->src_fn;
2930 /* Original cfun for the callee, doesn't change. */
2931 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2932 struct function *cfun_to_copy;
2933 basic_block bb;
2934 tree new_fndecl = NULL;
2935 bool need_debug_cleanup = false;
2936 int last;
2937 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2938 profile_count num = entry_block_map->count;
2940 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2942 /* Register specific tree functions. */
2943 gimple_register_cfg_hooks ();
2945 /* If we are inlining just a region of the function, make sure to connect
2946 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
2947 part of a loop, we must compute the frequency and probability of
2948 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2949 probabilities of edges incoming from the nonduplicated region. */
2950 if (new_entry)
2952 edge e;
2953 edge_iterator ei;
2954 den = profile_count::zero ();
2956 FOR_EACH_EDGE (e, ei, new_entry->preds)
2957 if (!e->src->aux)
2958 den += e->count ();
2959 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2962 profile_count::adjust_for_ipa_scaling (&num, &den);
2964 /* Must have a CFG here at this point. */
2965 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2966 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2969 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2970 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2971 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2972 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2974 /* Duplicate any exception-handling regions. */
2975 if (cfun->eh)
2976 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2977 remap_decl_1, id);
2980 /* Use aux pointers to map the original blocks to their copies. */
2980 FOR_EACH_BB_FN (bb, cfun_to_copy)
2981 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2983 basic_block new_bb = copy_bb (id, bb, num, den);
2984 bb->aux = new_bb;
2985 new_bb->aux = bb;
2986 new_bb->loop_father = entry_block_map->loop_father;
2989 last = last_basic_block_for_fn (cfun);
2991 /* Now that we've duplicated the blocks, duplicate their edges. */
2992 basic_block abnormal_goto_dest = NULL;
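/* If the call we are inlining can itself make an abnormal goto and is the
   last stmt of its block, remember the abnormal dispatcher it reaches so the
   copied blocks can be wired to it; otherwise no new abnormal edges are
   added (see copy_edges_for_bb).  */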
2993 if (id->call_stmt
2994 && stmt_can_make_abnormal_goto (id->call_stmt))
2996 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2998 bb = gimple_bb (id->call_stmt);
2999 gsi_next (&gsi);
3000 if (gsi_end_p (gsi))
3001 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3003 FOR_ALL_BB_FN (bb, cfun_to_copy)
3004 if (!id->blocks_to_copy
3005 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3006 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3007 abnormal_goto_dest, id);
3009 if (id->eh_landing_pad_dest)
3011 add_clobbers_to_eh_landing_pad (id);
3012 id->eh_landing_pad_dest = NULL;
3015 if (new_entry)
3017 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3018 EDGE_FALLTHRU);
3019 e->probability = profile_probability::always ();
3022 /* Duplicate the loop tree, if available and wanted. */
3023 if (loops_for_fn (src_cfun) != NULL
3024 && current_loops != NULL)
3026 copy_loops (id, entry_block_map->loop_father,
3027 get_loop (src_cfun, 0));
3028 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
3029 loops_state_set (LOOPS_NEED_FIXUP);
3032 /* If the loop tree in the source function needed fixup, mark the
3033 destination loop tree for fixup, too. */
3034 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3035 loops_state_set (LOOPS_NEED_FIXUP);
3037 if (gimple_in_ssa_p (cfun))
3038 FOR_ALL_BB_FN (bb, cfun_to_copy)
3039 if (!id->blocks_to_copy
3040 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3041 copy_phis_for_bb (bb, id);
3043 FOR_ALL_BB_FN (bb, cfun_to_copy)
3044 if (bb->aux)
3046 if (need_debug_cleanup
3047 && bb->index != ENTRY_BLOCK
3048 && bb->index != EXIT_BLOCK)
3049 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3050 /* Update call edge destinations. This cannot be done before loop
3051 info is updated, because we may split basic blocks. */
3052 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3053 && bb->index != ENTRY_BLOCK
3054 && bb->index != EXIT_BLOCK)
3055 redirect_all_calls (id, (basic_block)bb->aux);
3056 ((basic_block)bb->aux)->aux = NULL;
3057 bb->aux = NULL;
3060 /* Zero out AUX fields of blocks newly created during EH edge
3061 insertion. */
3062 for (; last < last_basic_block_for_fn (cfun); last++)
3064 if (need_debug_cleanup)
3065 maybe_move_debug_stmts_to_successors (id,
3066 BASIC_BLOCK_FOR_FN (cfun, last));
3067 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3068 /* Update call edge destinations. This cannot be done before loop
3069 info is updated, because we may split basic blocks. */
3070 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3071 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3073 entry_block_map->aux = NULL;
3074 exit_block_map->aux = NULL;
3076 if (id->eh_map)
3078 delete id->eh_map;
3079 id->eh_map = NULL;
3081 if (id->dependence_map)
3083 delete id->dependence_map;
3084 id->dependence_map = NULL;
3087 return new_fndecl;
3090 /* Copy the debug STMT using ID. We deal with these statements in a
3091 special way: if any variable in their VALUE expression wasn't
3092 remapped yet, we won't remap it, because that would get decl uids
3093 out of sync, causing codegen differences between -g and -g0. If
3094 this arises, we drop the VALUE expression altogether. */
3096 static void
3097 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3099 tree t, *n;
3100 struct walk_stmt_info wi;
3102 if (tree block = gimple_block (stmt))
3104 n = id->decl_map->get (block);
3105 gimple_set_block (stmt, n ? *n : id->block);
3108 if (gimple_debug_nonbind_marker_p (stmt))
3109 return;
3111 /* Remap all the operands in COPY. */
3112 memset (&wi, 0, sizeof (wi));
3113 wi.info = id;
3115 processing_debug_stmt = 1;
3117 if (gimple_debug_source_bind_p (stmt))
3118 t = gimple_debug_source_bind_get_var (stmt);
3119 else if (gimple_debug_bind_p (stmt))
3120 t = gimple_debug_bind_get_var (stmt);
3121 else
3122 gcc_unreachable ();
3124 if (TREE_CODE (t) == PARM_DECL && id->debug_map
3125 && (n = id->debug_map->get (t)))
3127 gcc_assert (VAR_P (*n));
3128 t = *n;
3130 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3131 /* T is a non-localized variable. */;
3132 else
3133 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3135 if (gimple_debug_bind_p (stmt))
3137 gimple_debug_bind_set_var (stmt, t);
3139 if (gimple_debug_bind_has_value_p (stmt))
3140 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3141 remap_gimple_op_r, &wi, NULL);
3143 /* Punt if any decl couldn't be remapped. */
3144 if (processing_debug_stmt < 0)
3145 gimple_debug_bind_reset_value (stmt);
3147 else if (gimple_debug_source_bind_p (stmt))
3149 gimple_debug_source_bind_set_var (stmt, t);
3150 /* When inlining and the source bind refers to one of the optimized-away
3151 parameters, change the source bind into a normal debug bind
3152 referring to the corresponding DEBUG_EXPR_DECL that should have
3153 been bound before the call stmt. */
3154 t = gimple_debug_source_bind_get_value (stmt);
3155 if (t != NULL_TREE
3156 && TREE_CODE (t) == PARM_DECL
3157 && id->call_stmt)
3159 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3160 unsigned int i;
3161 if (debug_args != NULL)
3163 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3164 if ((**debug_args)[i] == DECL_ORIGIN (t)
3165 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3167 t = (**debug_args)[i + 1];
3168 stmt->subcode = GIMPLE_DEBUG_BIND;
3169 gimple_debug_bind_set_value (stmt, t);
3170 break;
3174 if (gimple_debug_source_bind_p (stmt))
3175 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3176 remap_gimple_op_r, &wi, NULL);
3179 processing_debug_stmt = 0;
3181 update_stmt (stmt);
3184 /* Process deferred debug stmts. In order to give values better odds
3185 of being successfully remapped, we delay the processing of debug
3186 stmts until all other stmts that might require remapping are
3187 processed. */
3189 static void
3190 copy_debug_stmts (copy_body_data *id)
3192 size_t i;
3193 gdebug *stmt;
3195 if (!id->debug_stmts.exists ())
3196 return;
3198 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3199 copy_debug_stmt (stmt, id);
3201 id->debug_stmts.release ();
3204 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3205 another function. */
3207 static tree
3208 copy_tree_body (copy_body_data *id)
3210 tree fndecl = id->src_fn;
3211 tree body = DECL_SAVED_TREE (fndecl);
3213 walk_tree (&body, copy_tree_body_r, id, NULL);
3215 return body;
3218 /* Make a copy of the body of FN so that it can be inserted inline in
3219 another function. */
3221 static tree
3222 copy_body (copy_body_data *id,
3223 basic_block entry_block_map, basic_block exit_block_map,
3224 basic_block new_entry)
3226 tree fndecl = id->src_fn;
3227 tree body;
3229 /* If this body has a CFG, walk CFG and copy. */
3230 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3231 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3232 new_entry);
3233 copy_debug_stmts (id);
3234 delete id->killed_new_ssa_names;
3235 id->killed_new_ssa_names = NULL;
3237 return body;
3240 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3241 defined in function FN, or of a data member thereof. */
3243 static bool
3244 self_inlining_addr_expr (tree value, tree fn)
3246 tree var;
3248 if (TREE_CODE (value) != ADDR_EXPR)
3249 return false;
3251 var = get_base_address (TREE_OPERAND (value, 0));
3253 return var && auto_var_in_fn_p (var, fn);
3256 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3257 lexical block and line number information from base_stmt, if given,
3258 or from the last stmt of the block otherwise. */
3260 static gimple *
3261 insert_init_debug_bind (copy_body_data *id,
3262 basic_block bb, tree var, tree value,
3263 gimple *base_stmt)
3265 gimple *note;
3266 gimple_stmt_iterator gsi;
3267 tree tracked_var;
3269 if (!gimple_in_ssa_p (id->src_cfun))
3270 return NULL;
3272 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3273 return NULL;
3275 tracked_var = target_for_debug_bind (var);
3276 if (!tracked_var)
3277 return NULL;
3279 if (bb)
3281 gsi = gsi_last_bb (bb);
3282 if (!base_stmt && !gsi_end_p (gsi))
3283 base_stmt = gsi_stmt (gsi);
3286 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3288 if (bb)
3290 if (!gsi_end_p (gsi))
3291 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3292 else
3293 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3296 return note;
3299 static void
3300 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3302 /* If VAR represents a zero-sized variable, it's possible that the
3303 assignment statement may result in no gimple statements. */
3304 if (init_stmt)
3306 gimple_stmt_iterator si = gsi_last_bb (bb);
3308 /* We can end up with init statements that store to a non-register
3309 from a rhs with a conversion. Handle that here by forcing the
3310 rhs into a temporary. gimple_regimplify_operands is not
3311 prepared to do this for us. */
3312 if (!is_gimple_debug (init_stmt)
3313 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3314 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3315 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3317 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3318 gimple_expr_type (init_stmt),
3319 gimple_assign_rhs1 (init_stmt));
3320 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3321 GSI_NEW_STMT);
3322 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3323 gimple_assign_set_rhs1 (init_stmt, rhs);
3325 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3326 gimple_regimplify_operands (init_stmt, &si);
3328 if (!is_gimple_debug (init_stmt))
3330 tree def = gimple_assign_lhs (init_stmt);
3331 insert_init_debug_bind (id, bb, def, def, init_stmt);
3336 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3337 if need be (which should only be necessary for invalid programs). Attempt
3338 to convert VALUE to TYPE and return the result if it is possible; just return
3339 a zero constant of the given type if it fails. */
3341 tree
3342 force_value_to_type (tree type, tree value)
3344 /* If we can match up types by promotion/demotion do so. */
3345 if (fold_convertible_p (type, value))
3346 return fold_convert (type, value);
3348 /* ??? For valid programs we should not end up here.
3349 Still if we end up with truly mismatched types here, fall back
3350 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3351 GIMPLE to the following passes. */
3352 if (!is_gimple_reg_type (TREE_TYPE (value))
3353 || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3354 return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3355 else
3356 return build_zero_cst (type);
3359 /* Initialize parameter P with VALUE. If needed, produce the init statement
3360 at the end of BB. When BB is NULL, we return the init statement to be
3361 output later. */
3362 static gimple *
3363 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3364 basic_block bb, tree *vars)
3366 gimple *init_stmt = NULL;
3367 tree var;
3368 tree rhs = value;
3369 tree def = (gimple_in_ssa_p (cfun)
3370 ? ssa_default_def (id->src_cfun, p) : NULL);
3372 if (value
3373 && value != error_mark_node
3374 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3375 rhs = force_value_to_type (TREE_TYPE (p), value);
3377 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3378 here since the type of this decl must be visible to the calling
3379 function. */
3380 var = copy_decl_to_var (p, id);
3382 /* Declare this new variable. */
3383 DECL_CHAIN (var) = *vars;
3384 *vars = var;
3386 /* Make gimplifier happy about this variable. */
3387 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3389 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3390 we would not need to create a new variable here at all, if it
3391 weren't for debug info. Still, we can just use the argument
3392 value. */
3393 if (TREE_READONLY (p)
3394 && !TREE_ADDRESSABLE (p)
3395 && value && !TREE_SIDE_EFFECTS (value)
3396 && !def)
3398 /* We may produce non-gimple trees by adding NOPs or introduce
3399 invalid sharing when the operand is not really constant.
3400 It is not a big deal to prohibit constant propagation here as
3401 we will constant propagate in the DOM1 pass anyway. */
3402 if (is_gimple_min_invariant (value)
3403 && useless_type_conversion_p (TREE_TYPE (p),
3404 TREE_TYPE (value))
3405 /* We have to be very careful about ADDR_EXPR. Make sure
3406 the base variable isn't a local variable of the inlined
3407 function, e.g., when doing recursive inlining, direct or
3408 mutually-recursive or whatever, which is why we don't
3409 just test whether fn == current_function_decl. */
3410 && ! self_inlining_addr_expr (value, fn))
3412 insert_decl_map (id, p, value);
3413 insert_debug_decl_map (id, p, var);
3414 return insert_init_debug_bind (id, bb, var, value, NULL);
3418 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3419 that way, when the PARM_DECL is encountered, it will be
3420 automatically replaced by the VAR_DECL. */
3421 insert_decl_map (id, p, var);
3423 /* Even if P was TREE_READONLY, the new VAR should not be.
3424 In the original code, we would have constructed a
3425 temporary, and then the function body would have never
3426 changed the value of P. However, now, we will be
3427 constructing VAR directly. The constructor body may
3428 change its value multiple times as it is being
3429 constructed. Therefore, it must not be TREE_READONLY;
3430 the back-end assumes that a TREE_READONLY variable is
3431 assigned to only once. */
3432 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3433 TREE_READONLY (var) = 0;
3435 /* If there is no setup required and we are in SSA, take the easy route
3436 replacing all SSA names representing the function parameter by the
3437 SSA name passed to the function.
3439 We need to construct a map for the variable anyway as it might be used
3440 in different SSA names when the parameter is set in the function.
3442 Do the replacement at -O0 for const arguments replaced by a constant.
3443 This is important for builtin_constant_p and other constructs requiring
3444 a constant argument to be visible in the inlined function body. */
3445 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3446 && (optimize
3447 || (TREE_READONLY (p)
3448 && is_gimple_min_invariant (rhs)))
3449 && (TREE_CODE (rhs) == SSA_NAME
3450 || is_gimple_min_invariant (rhs))
3451 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3453 insert_decl_map (id, def, rhs);
3454 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3457 /* If the value of the argument is never used, don't bother initializing
3458 it. */
3459 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3461 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3462 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3465 /* Initialize this VAR_DECL from the equivalent argument. Convert
3466 the argument to the proper type in case it was promoted. */
3467 if (value)
3469 if (rhs == error_mark_node)
3471 insert_decl_map (id, p, var);
3472 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3475 STRIP_USELESS_TYPE_CONVERSION (rhs);
3477 /* If we are in SSA form, properly remap the default definition
3478 or assign to a dummy SSA name if the parameter is unused and
3479 we are not optimizing. */
3480 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3482 if (def)
3484 def = remap_ssa_name (def, id);
3485 init_stmt = gimple_build_assign (def, rhs);
3486 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3487 set_ssa_default_def (cfun, var, NULL);
3489 else if (!optimize)
3491 def = make_ssa_name (var);
3492 init_stmt = gimple_build_assign (def, rhs);
3495 else
3496 init_stmt = gimple_build_assign (var, rhs);
3498 if (bb && init_stmt)
3499 insert_init_stmt (id, bb, init_stmt);
3501 return init_stmt;
3504 /* Generate code to initialize the parameters of the function at the
3505 top of the stack in ID from the GIMPLE_CALL STMT. */
3507 static void
3508 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3509 tree fn, basic_block bb)
3511 tree parms;
3512 size_t i;
3513 tree p;
3514 tree vars = NULL_TREE;
3515 tree static_chain = gimple_call_chain (stmt);
3517 /* Figure out what the parameters are. */
3518 parms = DECL_ARGUMENTS (fn);
3520 /* Loop through the parameter declarations, replacing each with an
3521 equivalent VAR_DECL, appropriately initialized. */
3522 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3524 tree val;
3525 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3526 setup_one_parameter (id, p, val, fn, bb, &vars);
3528 /* After remapping parameters remap their types. This has to be done
3529 in a second loop over all parameters to appropriately remap
3530 variable sized arrays when the size is specified in a
3531 parameter following the array. */
3532 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3534 tree *varp = id->decl_map->get (p);
3535 if (varp && VAR_P (*varp))
3537 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3538 ? ssa_default_def (id->src_cfun, p) : NULL);
3539 tree var = *varp;
3540 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3541 /* Also remap the default definition if it was remapped
3542 to the default definition of the parameter replacement
3543 by the parameter setup. */
3544 if (def)
3546 tree *defp = id->decl_map->get (def);
3547 if (defp
3548 && TREE_CODE (*defp) == SSA_NAME
3549 && SSA_NAME_VAR (*defp) == var)
3550 TREE_TYPE (*defp) = TREE_TYPE (var);
3555 /* Initialize the static chain. */
3556 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3557 gcc_assert (fn != current_function_decl);
3558 if (p)
3560 /* No static chain? Seems like a bug in tree-nested.c. */
3561 gcc_assert (static_chain);
3563 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3566 declare_inline_vars (id->block, vars);
3570 /* Declare a return variable to replace the RESULT_DECL for the
3571 function we are calling. An appropriate DECL_STMT is returned.
3572 The USE_STMT is filled to contain a use of the declaration to
3573 indicate the return value of the function.
3575 RETURN_SLOT, if non-null, is the place where to store the result. It
3576 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3577 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3579 The return value is a (possibly null) value that holds the result
3580 as seen by the caller. */
3582 static tree
3583 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3584 basic_block entry_bb)
3586 tree callee = id->src_fn;
3587 tree result = DECL_RESULT (callee);
3588 tree callee_type = TREE_TYPE (result);
3589 tree caller_type;
3590 tree var, use;
3592 /* Handle type-mismatches in the function declaration return type
3593 vs. the call expression. */
3594 if (modify_dest)
3595 caller_type = TREE_TYPE (modify_dest);
3596 else if (return_slot)
3597 caller_type = TREE_TYPE (return_slot);
3598 else /* No LHS on the call. */
3599 caller_type = TREE_TYPE (TREE_TYPE (callee));
3601 /* We don't need to do anything for functions that don't return anything. */
3602 if (VOID_TYPE_P (callee_type))
3603 return NULL_TREE;
3605 /* If there was a return slot, then the return value is the
3606 dereferenced address of that object. */
3607 if (return_slot)
3609 /* The front end shouldn't have used both return_slot and
3610 a modify expression. */
3611 gcc_assert (!modify_dest);
3612 if (DECL_BY_REFERENCE (result))
3614 tree return_slot_addr = build_fold_addr_expr (return_slot);
3615 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3617 /* We are going to construct *&return_slot and we can't do that
3618 for variables believed not to be addressable.
3620 FIXME: This check possibly can match, because values returned
3621 via return slot optimization are not believed to have their
3622 address taken by alias analysis. */
3623 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3624 var = return_slot_addr;
3625 mark_addressable (return_slot);
3627 else
3629 var = return_slot;
3630 gcc_assert (TREE_CODE (var) != SSA_NAME);
3631 if (TREE_ADDRESSABLE (result))
3632 mark_addressable (var);
3634 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3635 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3636 && !DECL_GIMPLE_REG_P (result)
3637 && DECL_P (var))
3638 DECL_GIMPLE_REG_P (var) = 0;
3640 if (!useless_type_conversion_p (callee_type, caller_type))
3641 var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3643 use = NULL;
3644 goto done;
3647 /* All types requiring non-trivial constructors should have been handled. */
3648 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3650 /* Attempt to avoid creating a new temporary variable. */
3651 if (modify_dest
3652 && TREE_CODE (modify_dest) != SSA_NAME)
3654 bool use_it = false;
3656 /* We can't use MODIFY_DEST if there's type promotion involved. */
3657 if (!useless_type_conversion_p (callee_type, caller_type))
3658 use_it = false;
3660 /* ??? If we're assigning to a variable sized type, then we must
3661 reuse the destination variable, because we've no good way to
3662 create variable sized temporaries at this point. */
3663 else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3664 use_it = true;
3666 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3667 reuse it as the result of the call directly. Don't do this if
3668 it would promote MODIFY_DEST to addressable. */
3669 else if (TREE_ADDRESSABLE (result))
3670 use_it = false;
3671 else
3673 tree base_m = get_base_address (modify_dest);
3675 /* If the base isn't a decl, then it's a pointer, and we don't
3676 know where that's going to go. */
3677 if (!DECL_P (base_m))
3678 use_it = false;
3679 else if (is_global_var (base_m))
3680 use_it = false;
3681 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3682 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3683 && !DECL_GIMPLE_REG_P (result)
3684 && DECL_GIMPLE_REG_P (base_m))
3685 use_it = false;
3686 else if (!TREE_ADDRESSABLE (base_m))
3687 use_it = true;
3690 if (use_it)
3692 var = modify_dest;
3693 use = NULL;
3694 goto done;
3698 gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3700 var = copy_result_decl_to_var (result, id);
3701 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3703 /* Do not have the rest of GCC warn about this variable as it should
3704 not be visible to the user. */
3705 TREE_NO_WARNING (var) = 1;
3707 declare_inline_vars (id->block, var);
3709 /* Build the use expr. If the return type of the function was
3710 promoted, convert it back to the expected type. */
3711 use = var;
3712 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3714 /* If we can match up types by promotion/demotion do so. */
3715 if (fold_convertible_p (caller_type, var))
3716 use = fold_convert (caller_type, var);
3717 else
3719 /* ??? For valid programs we should not end up here.
3720 Still if we end up with truly mismatched types here, fall back
3721 to using a MEM_REF to not leak invalid GIMPLE to the following
3722 passes. */
3723 /* Prevent var from being written into SSA form. */
3724 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3725 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3726 DECL_GIMPLE_REG_P (var) = false;
3727 else if (is_gimple_reg_type (TREE_TYPE (var)))
3728 TREE_ADDRESSABLE (var) = true;
3729 use = fold_build2 (MEM_REF, caller_type,
3730 build_fold_addr_expr (var),
3731 build_int_cst (ptr_type_node, 0));
3735 STRIP_USELESS_TYPE_CONVERSION (use);
3737 if (DECL_BY_REFERENCE (result))
3739 TREE_ADDRESSABLE (var) = 1;
3740 var = build_fold_addr_expr (var);
3743 done:
3744 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3745 way, when the RESULT_DECL is encountered, it will be
3746 automatically replaced by the VAR_DECL.
3748 When returning by reference, ensure that RESULT_DECL remaps to
3749 gimple_val. */
3750 if (DECL_BY_REFERENCE (result)
3751 && !is_gimple_val (var))
3753 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3754 insert_decl_map (id, result, temp);
3755 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3756 its default_def SSA_NAME. */
3757 if (gimple_in_ssa_p (id->src_cfun)
3758 && is_gimple_reg (result))
3760 temp = make_ssa_name (temp);
3761 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3763 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3765 else
3766 insert_decl_map (id, result, var);
3768 /* Remember this so we can ignore it in remap_decls. */
3769 id->retvar = var;
3770 return use;
3773 /* Determine if the function can be copied. If so return NULL. If
3774 not return a string describing the reason for failure. */
3776 const char *
3777 copy_forbidden (struct function *fun)
3779 const char *reason = fun->cannot_be_copied_reason;
3781 /* Only examine the function once. */
3782 if (fun->cannot_be_copied_set)
3783 return reason;
3785 /* We cannot copy a function that receives a non-local goto
3786 because we cannot remap the destination label used in the
3787 function that is performing the non-local goto. */
3788 /* ??? Actually, this should be possible, if we work at it.
3789 No doubt there's just a handful of places that simply
3790 assume it doesn't happen and don't substitute properly. */
3791 if (fun->has_nonlocal_label)
3793 reason = G_("function %q+F can never be copied "
3794 "because it receives a non-local goto");
3795 goto fail;
3798 if (fun->has_forced_label_in_static)
3800 reason = G_("function %q+F can never be copied because it saves "
3801 "address of local label in a static variable");
3802 goto fail;
3805 fail:
3806 fun->cannot_be_copied_reason = reason;
3807 fun->cannot_be_copied_set = true;
3808 return reason;
3812 static const char *inline_forbidden_reason;
3814 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3815 iff a function cannot be inlined. Also sets the reason why. */
3817 static tree
3818 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3819 struct walk_stmt_info *wip)
3821 tree fn = (tree) wip->info;
3822 tree t;
3823 gimple *stmt = gsi_stmt (*gsi);
3825 switch (gimple_code (stmt))
3827 case GIMPLE_CALL:
3828 /* Refuse to inline an alloca call unless the user explicitly forced it,
3829 as this may change the program's memory overhead drastically when the
3830 function using alloca is called in a loop. In the GCC present in
3831 SPEC2000, inlining into schedule_block caused it to require 2GB of
3832 RAM instead of 256MB. Don't do so for alloca calls emitted for
3833 VLA objects, as those can't cause unbounded growth (they're always
3834 wrapped inside stack_save/stack_restore regions). */
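      /* An illustrative sketch (not from any real testcase) of the pattern
	 being guarded against:

	   void consume (void *);
	   static void f (void) { consume (__builtin_alloca (4096)); }
	   void g (int n)
	   {
	     for (int i = 0; i < n; i++)
	       f ();   // as a call, the memory is released when f returns
	   }

	 Once f is inlined, the alloca lands inside g's loop and every
	 iteration enlarges g's frame by 4k instead.  */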
3835 if (gimple_maybe_alloca_call_p (stmt)
3836 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3837 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3839 inline_forbidden_reason
3840 = G_("function %q+F can never be inlined because it uses "
3841 "alloca (override using the always_inline attribute)");
3842 *handled_ops_p = true;
3843 return fn;
3846 t = gimple_call_fndecl (stmt);
3847 if (t == NULL_TREE)
3848 break;
3850 /* We cannot inline functions that call setjmp. */
3851 if (setjmp_call_p (t))
3853 inline_forbidden_reason
3854 = G_("function %q+F can never be inlined because it uses setjmp");
3855 *handled_ops_p = true;
3856 return t;
3859 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3860 switch (DECL_FUNCTION_CODE (t))
3862 /* We cannot inline functions that take a variable number of
3863 arguments. */
3864 case BUILT_IN_VA_START:
3865 case BUILT_IN_NEXT_ARG:
3866 case BUILT_IN_VA_END:
3867 inline_forbidden_reason
3868 = G_("function %q+F can never be inlined because it "
3869 "uses variable argument lists");
3870 *handled_ops_p = true;
3871 return t;
3873 case BUILT_IN_LONGJMP:
3874 /* We can't inline functions that call __builtin_longjmp at
3875 all. The non-local goto machinery really requires the
3876 destination be in a different function. If we allow the
3877 function calling __builtin_longjmp to be inlined into the
3878 function calling __builtin_setjmp, Things will Go Awry. */
3879 inline_forbidden_reason
3880 = G_("function %q+F can never be inlined because "
3881 "it uses setjmp-longjmp exception handling");
3882 *handled_ops_p = true;
3883 return t;
3885 case BUILT_IN_NONLOCAL_GOTO:
3886 /* Similarly. */
3887 inline_forbidden_reason
3888 = G_("function %q+F can never be inlined because "
3889 "it uses non-local goto");
3890 *handled_ops_p = true;
3891 return t;
3893 case BUILT_IN_RETURN:
3894 case BUILT_IN_APPLY_ARGS:
3895 /* If a __builtin_apply_args caller would be inlined,
3896 it would be saving arguments of the function it has
3897 been inlined into. Similarly, __builtin_return would
3898 return from the function the call has been inlined into. */
3899 inline_forbidden_reason
3900 = G_("function %q+F can never be inlined because "
3901 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3902 *handled_ops_p = true;
3903 return t;
3905 default:
3906 break;
3908 break;
3910 case GIMPLE_GOTO:
3911 t = gimple_goto_dest (stmt);
3913 /* We will not inline a function which uses computed goto. The
3914 addresses of its local labels, which may be tucked into
3915 global storage, are of course not constant across
3916 instantiations, which causes unexpected behavior. */
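      /* For instance (illustrative only, using the GNU C label-address
	 extension):

	   int dispatch (int i)
	   {
	     static void *tbl[] = { &&L0, &&L1 };
	     goto *tbl[i & 1];
	   L0: return 0;
	   L1: return 1;
	   }

	 Here gimple_goto_dest is not a LABEL_DECL, and the label addresses
	 stored in tbl would not be valid in a copy of the body.  */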
3917 if (TREE_CODE (t) != LABEL_DECL)
3919 inline_forbidden_reason
3920 = G_("function %q+F can never be inlined "
3921 "because it contains a computed goto");
3922 *handled_ops_p = true;
3923 return t;
3925 break;
3927 default:
3928 break;
3931 *handled_ops_p = false;
3932 return NULL_TREE;
3935 /* Return true if FNDECL is a function that cannot be inlined into
3936 another one. */
3938 static bool
3939 inline_forbidden_p (tree fndecl)
3941 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3942 struct walk_stmt_info wi;
3943 basic_block bb;
3944 bool forbidden_p = false;
3946 /* First check for shared reasons not to copy the code. */
3947 inline_forbidden_reason = copy_forbidden (fun);
3948 if (inline_forbidden_reason != NULL)
3949 return true;
3951 /* Next, walk the statements of the function looking for
3952 constructs we can't handle, or that are non-optimal for inlining. */
3953 hash_set<tree> visited_nodes;
3954 memset (&wi, 0, sizeof (wi));
3955 wi.info = (void *) fndecl;
3956 wi.pset = &visited_nodes;
3958 FOR_EACH_BB_FN (bb, fun)
3960 gimple *ret;
3961 gimple_seq seq = bb_seq (bb);
3962 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3963 forbidden_p = (ret != NULL);
3964 if (forbidden_p)
3965 break;
3968 return forbidden_p;
3971 /* Return false if the function FNDECL cannot be inlined on account of its
3972 attributes, true otherwise. */
3973 static bool
3974 function_attribute_inlinable_p (const_tree fndecl)
3976 if (targetm.attribute_table)
3978 const_tree a;
3980 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3982 const_tree name = get_attribute_name (a);
3983 int i;
3985 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3986 if (is_attribute_p (targetm.attribute_table[i].name, name))
3987 return targetm.function_attribute_inlinable_p (fndecl);
3991 return true;
3994 /* Returns nonzero if FN is a function that does not have any
3995 fundamental inline blocking properties. */
3997 bool
3998 tree_inlinable_function_p (tree fn)
4000 bool inlinable = true;
4001 bool do_warning;
4002 tree always_inline;
4004 /* If we've already decided this function shouldn't be inlined,
4005 there's no need to check again. */
4006 if (DECL_UNINLINABLE (fn))
4007 return false;
4009 /* We only warn for functions declared `inline' by the user. */
4010 do_warning = (warn_inline
4011 && DECL_DECLARED_INLINE_P (fn)
4012 && !DECL_NO_INLINE_WARNING_P (fn)
4013 && !DECL_IN_SYSTEM_HEADER (fn));
4015 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4017 if (flag_no_inline
4018 && always_inline == NULL)
4020 if (do_warning)
4021 warning (OPT_Winline, "function %q+F can never be inlined because it "
4022 "is suppressed using %<-fno-inline%>", fn);
4023 inlinable = false;
4026 else if (!function_attribute_inlinable_p (fn))
4028 if (do_warning)
4029 warning (OPT_Winline, "function %q+F can never be inlined because it "
4030 "uses attributes conflicting with inlining", fn);
4031 inlinable = false;
4034 else if (inline_forbidden_p (fn))
4036 /* See if we should warn about uninlinable functions. Previously,
4037 some of these warnings would be issued while trying to expand
4038 the function inline, but that would cause multiple warnings
4039 about functions that would for example call alloca. But since
4040 this a property of the function, just one warning is enough.
4041 As a bonus we can now give more details about the reason why a
4042 function is not inlinable. */
4043 if (always_inline)
4044 error (inline_forbidden_reason, fn);
4045 else if (do_warning)
4046 warning (OPT_Winline, inline_forbidden_reason, fn);
4048 inlinable = false;
4051 /* Squirrel away the result so that we don't have to check again. */
4052 DECL_UNINLINABLE (fn) = !inlinable;
4054 return inlinable;
4057 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
4058 word size, take a possible memcpy call into account, and return the
4059 cost based on whether we are optimizing for size or speed according to SPEED_P. */
4062 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4064 HOST_WIDE_INT size;
4066 gcc_assert (!VOID_TYPE_P (type));
4068 if (TREE_CODE (type) == VECTOR_TYPE)
4070 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4071 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4072 int orig_mode_size
4073 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4074 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4075 return ((orig_mode_size + simd_mode_size - 1)
4076 / simd_mode_size);
4079 size = int_size_in_bytes (type);
4081 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4082 /* Cost of a memcpy call, 3 arguments and the call. */
4083 return 4;
4084 else
4085 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
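/* A rough worked example (the numbers are target-dependent): assuming
   MOVE_MAX_PIECES is 8 and MOVE_RATIO is large enough, a 24-byte struct
   copy is charged (24 + 8 - 1) / 8 == 3 units above, whereas a copy too
   large for piecewise moves falls back to the flat memcpy cost of 4.  */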
4088 /* Returns the cost of operation CODE, according to WEIGHTS. */
4090 static int
4091 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4092 tree op1 ATTRIBUTE_UNUSED, tree op2)
4094 switch (code)
4096 /* These are "free" conversions, or their presumed cost
4097 is folded into other operations. */
4098 case RANGE_EXPR:
4099 CASE_CONVERT:
4100 case COMPLEX_EXPR:
4101 case PAREN_EXPR:
4102 case VIEW_CONVERT_EXPR:
4103 return 0;
4105 /* Assign cost of 1 to usual operations.
4106 ??? We may consider mapping RTL costs to this. */
4107 case COND_EXPR:
4108 case VEC_COND_EXPR:
4109 case VEC_PERM_EXPR:
4111 case PLUS_EXPR:
4112 case POINTER_PLUS_EXPR:
4113 case POINTER_DIFF_EXPR:
4114 case MINUS_EXPR:
4115 case MULT_EXPR:
4116 case MULT_HIGHPART_EXPR:
4118 case ADDR_SPACE_CONVERT_EXPR:
4119 case FIXED_CONVERT_EXPR:
4120 case FIX_TRUNC_EXPR:
4122 case NEGATE_EXPR:
4123 case FLOAT_EXPR:
4124 case MIN_EXPR:
4125 case MAX_EXPR:
4126 case ABS_EXPR:
4127 case ABSU_EXPR:
4129 case LSHIFT_EXPR:
4130 case RSHIFT_EXPR:
4131 case LROTATE_EXPR:
4132 case RROTATE_EXPR:
4134 case BIT_IOR_EXPR:
4135 case BIT_XOR_EXPR:
4136 case BIT_AND_EXPR:
4137 case BIT_NOT_EXPR:
4139 case TRUTH_ANDIF_EXPR:
4140 case TRUTH_ORIF_EXPR:
4141 case TRUTH_AND_EXPR:
4142 case TRUTH_OR_EXPR:
4143 case TRUTH_XOR_EXPR:
4144 case TRUTH_NOT_EXPR:
4146 case LT_EXPR:
4147 case LE_EXPR:
4148 case GT_EXPR:
4149 case GE_EXPR:
4150 case EQ_EXPR:
4151 case NE_EXPR:
4152 case ORDERED_EXPR:
4153 case UNORDERED_EXPR:
4155 case UNLT_EXPR:
4156 case UNLE_EXPR:
4157 case UNGT_EXPR:
4158 case UNGE_EXPR:
4159 case UNEQ_EXPR:
4160 case LTGT_EXPR:
4162 case CONJ_EXPR:
4164 case PREDECREMENT_EXPR:
4165 case PREINCREMENT_EXPR:
4166 case POSTDECREMENT_EXPR:
4167 case POSTINCREMENT_EXPR:
4169 case REALIGN_LOAD_EXPR:
4171 case WIDEN_SUM_EXPR:
4172 case WIDEN_MULT_EXPR:
4173 case DOT_PROD_EXPR:
4174 case SAD_EXPR:
4175 case WIDEN_MULT_PLUS_EXPR:
4176 case WIDEN_MULT_MINUS_EXPR:
4177 case WIDEN_LSHIFT_EXPR:
4179 case VEC_WIDEN_MULT_HI_EXPR:
4180 case VEC_WIDEN_MULT_LO_EXPR:
4181 case VEC_WIDEN_MULT_EVEN_EXPR:
4182 case VEC_WIDEN_MULT_ODD_EXPR:
4183 case VEC_UNPACK_HI_EXPR:
4184 case VEC_UNPACK_LO_EXPR:
4185 case VEC_UNPACK_FLOAT_HI_EXPR:
4186 case VEC_UNPACK_FLOAT_LO_EXPR:
4187 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4188 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4189 case VEC_PACK_TRUNC_EXPR:
4190 case VEC_PACK_SAT_EXPR:
4191 case VEC_PACK_FIX_TRUNC_EXPR:
4192 case VEC_PACK_FLOAT_EXPR:
4193 case VEC_WIDEN_LSHIFT_HI_EXPR:
4194 case VEC_WIDEN_LSHIFT_LO_EXPR:
4195 case VEC_DUPLICATE_EXPR:
4196 case VEC_SERIES_EXPR:
4198 return 1;
4200 /* A few special cases of expensive operations. This is useful
4201 to avoid inlining functions that have too many of these. */
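    /* E.g. with the time-based weights initialized in init_inline_once
       below, a division by a non-constant divisor is charged
       div_mod_cost (10), while division by a constant is expected to be
       strength-reduced to cheaper operations and so only costs 1.  */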
4202 case TRUNC_DIV_EXPR:
4203 case CEIL_DIV_EXPR:
4204 case FLOOR_DIV_EXPR:
4205 case ROUND_DIV_EXPR:
4206 case EXACT_DIV_EXPR:
4207 case TRUNC_MOD_EXPR:
4208 case CEIL_MOD_EXPR:
4209 case FLOOR_MOD_EXPR:
4210 case ROUND_MOD_EXPR:
4211 case RDIV_EXPR:
4212 if (TREE_CODE (op2) != INTEGER_CST)
4213 return weights->div_mod_cost;
4214 return 1;
4216 /* Bit-field insertion needs several shift and mask operations. */
4217 case BIT_INSERT_EXPR:
4218 return 3;
4220 default:
4221 /* We expect a copy assignment with no operator. */
4222 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4223 return 0;
4228 /* Estimate number of instructions that will be created by expanding
4229 the statements in the statement sequence STMTS.
4230 WEIGHTS contains weights attributed to various constructs. */
4233 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4235 int cost;
4236 gimple_stmt_iterator gsi;
4238 cost = 0;
4239 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4240 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4242 return cost;
4246 /* Estimate number of instructions that will be created by expanding STMT.
4247 WEIGHTS contains weights attributed to various constructs. */
4250 estimate_num_insns (gimple *stmt, eni_weights *weights)
4252 unsigned cost, i;
4253 enum gimple_code code = gimple_code (stmt);
4254 tree lhs;
4255 tree rhs;
4257 switch (code)
4259 case GIMPLE_ASSIGN:
4260 /* Try to estimate the cost of assignments. We have three cases to
4261 deal with:
4262 1) Simple assignments to registers;
4263 2) Stores to things that must live in memory. This includes
4264 "normal" stores to scalars, but also assignments of large
4265 structures, or constructors of big arrays;
4267 Let us look at the first two cases, assuming we have "a = b + C":
4268 <GIMPLE_ASSIGN <var_decl "a">
4269 <plus_expr <var_decl "b"> <constant C>>
4270 If "a" is a GIMPLE register, the assignment to it is free on almost
4271 any target, because "a" usually ends up in a real register. Hence
4272 the only cost of this expression comes from the PLUS_EXPR, and we
4273 can ignore the GIMPLE_ASSIGN.
4274 If "a" is not a GIMPLE register, the assignment to "a" will most
4275 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4276 of moving something into "a", which we compute using the function
4277 estimate_move_cost. */
4278 if (gimple_clobber_p (stmt))
4279 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4281 lhs = gimple_assign_lhs (stmt);
4282 rhs = gimple_assign_rhs1 (stmt);
4284 cost = 0;
4286 /* Account for the cost of moving to / from memory. */
4287 if (gimple_store_p (stmt))
4288 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4289 if (gimple_assign_load_p (stmt))
4290 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4292 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4293 gimple_assign_rhs1 (stmt),
4294 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4295 == GIMPLE_BINARY_RHS
4296 ? gimple_assign_rhs2 (stmt) : NULL);
4297 break;
4299 case GIMPLE_COND:
4300 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4301 gimple_op (stmt, 0),
4302 gimple_op (stmt, 1));
4303 break;
4305 case GIMPLE_SWITCH:
4307 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4308 /* Take into account cost of the switch + guess 2 conditional jumps for
4309 each case label.
4311 TODO: once the switch expansion logic is sufficiently separated, we can
4312 do a better job of estimating the cost of the switch. */
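	/* Worked example (illustrative): a switch with 10 labels costs
	   floor_log2 (10) * 2 == 6 when estimating time (a balanced
	   decision tree), but 10 * 2 == 20 when estimating size.  */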
4313 if (weights->time_based)
4314 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4315 else
4316 cost = gimple_switch_num_labels (switch_stmt) * 2;
4318 break;
4320 case GIMPLE_CALL:
4322 tree decl;
4324 if (gimple_call_internal_p (stmt))
4325 return 0;
4326 else if ((decl = gimple_call_fndecl (stmt))
4327 && fndecl_built_in_p (decl))
4329 /* Do not special case builtins where we see the body.
4330 This just confuses the inliner. */
4331 struct cgraph_node *node;
4332 if (!(node = cgraph_node::get (decl))
4333 || node->definition)
4335 /* For builtins that are likely expanded to nothing or
4336 inlined, do not account for operand costs. */
4337 else if (is_simple_builtin (decl))
4338 return 0;
4339 else if (is_inexpensive_builtin (decl))
4340 return weights->target_builtin_call_cost;
4341 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4343 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4344 specialize the cheap expansion we do here.
4345 ??? This asks for a more general solution. */
4346 switch (DECL_FUNCTION_CODE (decl))
4348 case BUILT_IN_POW:
4349 case BUILT_IN_POWF:
4350 case BUILT_IN_POWL:
4351 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4352 && (real_equal
4353 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4354 &dconst2)))
4355 return estimate_operator_cost
4356 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4357 gimple_call_arg (stmt, 0));
4358 break;
4360 default:
4361 break;
4366 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4367 if (gimple_call_lhs (stmt))
4368 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4369 weights->time_based);
4370 for (i = 0; i < gimple_call_num_args (stmt); i++)
4372 tree arg = gimple_call_arg (stmt, i);
4373 cost += estimate_move_cost (TREE_TYPE (arg),
4374 weights->time_based);
4376 break;
4379 case GIMPLE_RETURN:
4380 return weights->return_cost;
4382 case GIMPLE_GOTO:
4383 case GIMPLE_LABEL:
4384 case GIMPLE_NOP:
4385 case GIMPLE_PHI:
4386 case GIMPLE_PREDICT:
4387 case GIMPLE_DEBUG:
4388 return 0;
4390 case GIMPLE_ASM:
4392 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4393 /* 1000 means infinity. This avoids overflows later
4394 with very long asm statements. */
4395 if (count > 1000)
4396 count = 1000;
4397 /* If this asm is asm inline, count anything as minimum size. */
4398 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4399 count = MIN (1, count);
4400 return MAX (1, count);
4403 case GIMPLE_RESX:
4404 /* This is either going to be an external function call with one
4405 argument, or two register copy statements plus a goto. */
4406 return 2;
4408 case GIMPLE_EH_DISPATCH:
4409 /* ??? This is going to turn into a switch statement. Ideally
4410 we'd have a look at the eh region and estimate the number of
4411 edges involved. */
4412 return 10;
4414 case GIMPLE_BIND:
4415 return estimate_num_insns_seq (
4416 gimple_bind_body (as_a <gbind *> (stmt)),
4417 weights);
4419 case GIMPLE_EH_FILTER:
4420 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4422 case GIMPLE_CATCH:
4423 return estimate_num_insns_seq (gimple_catch_handler (
4424 as_a <gcatch *> (stmt)),
4425 weights);
4427 case GIMPLE_TRY:
4428 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4429 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4431 /* OMP directives are generally very expensive. */
4433 case GIMPLE_OMP_RETURN:
4434 case GIMPLE_OMP_SECTIONS_SWITCH:
4435 case GIMPLE_OMP_ATOMIC_STORE:
4436 case GIMPLE_OMP_CONTINUE:
4437 /* ...except these, which are cheap. */
4438 return 0;
4440 case GIMPLE_OMP_ATOMIC_LOAD:
4441 return weights->omp_cost;
4443 case GIMPLE_OMP_FOR:
4444 return (weights->omp_cost
4445 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4446 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4448 case GIMPLE_OMP_PARALLEL:
4449 case GIMPLE_OMP_TASK:
4450 case GIMPLE_OMP_CRITICAL:
4451 case GIMPLE_OMP_MASTER:
4452 case GIMPLE_OMP_TASKGROUP:
4453 case GIMPLE_OMP_ORDERED:
4454 case GIMPLE_OMP_SCAN:
4455 case GIMPLE_OMP_SECTION:
4456 case GIMPLE_OMP_SECTIONS:
4457 case GIMPLE_OMP_SINGLE:
4458 case GIMPLE_OMP_TARGET:
4459 case GIMPLE_OMP_TEAMS:
4460 return (weights->omp_cost
4461 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4463 case GIMPLE_TRANSACTION:
4464 return (weights->tm_cost
4465 + estimate_num_insns_seq (gimple_transaction_body (
4466 as_a <gtransaction *> (stmt)),
4467 weights));
4469 default:
4470 gcc_unreachable ();
4473 return cost;
4476 /* Estimate number of instructions that will be created by expanding
4477 function FNDECL. WEIGHTS contains weights attributed to various
4478 constructs. */
4481 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4483 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4484 gimple_stmt_iterator bsi;
4485 basic_block bb;
4486 int n = 0;
4488 gcc_assert (my_function && my_function->cfg);
4489 FOR_EACH_BB_FN (bb, my_function)
4491 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4492 n += estimate_num_insns (gsi_stmt (bsi), weights);
4495 return n;
4499 /* Initializes weights used by estimate_num_insns. */
4501 void
4502 init_inline_once (void)
4504 eni_size_weights.call_cost = 1;
4505 eni_size_weights.indirect_call_cost = 3;
4506 eni_size_weights.target_builtin_call_cost = 1;
4507 eni_size_weights.div_mod_cost = 1;
4508 eni_size_weights.omp_cost = 40;
4509 eni_size_weights.tm_cost = 10;
4510 eni_size_weights.time_based = false;
4511 eni_size_weights.return_cost = 1;
4513 /* Estimating time for call is difficult, since we have no idea what the
4514 called function does. In the current uses of eni_time_weights,
4515 underestimating the cost does less harm than overestimating it, so
4516 we choose a rather small value here. */
4517 eni_time_weights.call_cost = 10;
4518 eni_time_weights.indirect_call_cost = 15;
4519 eni_time_weights.target_builtin_call_cost = 1;
4520 eni_time_weights.div_mod_cost = 10;
4521 eni_time_weights.omp_cost = 40;
4522 eni_time_weights.tm_cost = 40;
4523 eni_time_weights.time_based = true;
4524 eni_time_weights.return_cost = 2;
4528 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4530 static void
4531 prepend_lexical_block (tree current_block, tree new_block)
4533 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4534 BLOCK_SUBBLOCKS (current_block) = new_block;
4535 BLOCK_SUPERCONTEXT (new_block) = current_block;
4538 /* Add local variables from CALLEE to CALLER. */
4540 static inline void
4541 add_local_variables (struct function *callee, struct function *caller,
4542 copy_body_data *id)
4544 tree var;
4545 unsigned ix;
4547 FOR_EACH_LOCAL_DECL (callee, ix, var)
4548 if (!can_be_nonlocal (var, id))
4550 tree new_var = remap_decl (var, id);
4552 /* Remap debug-expressions. */
4553 if (VAR_P (new_var)
4554 && DECL_HAS_DEBUG_EXPR_P (var)
4555 && new_var != var)
4557 tree tem = DECL_DEBUG_EXPR (var);
4558 bool old_regimplify = id->regimplify;
4559 id->remapping_type_depth++;
4560 walk_tree (&tem, copy_tree_body_r, id, NULL);
4561 id->remapping_type_depth--;
4562 id->regimplify = old_regimplify;
4563 SET_DECL_DEBUG_EXPR (new_var, tem);
4564 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4566 add_local_decl (caller, new_var);
4570 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4571 have brought in or introduced any debug stmts for SRCVAR. */
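/* The reset is a debug bind with no value, which in GIMPLE dumps shows
   up roughly as

     # DEBUG remapped_var => NULL

   so that var-tracking stops computing locations for the variable past
   this point (remapped_var stands for whatever SRCVAR mapped to).  */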
4573 static inline void
4574 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4576 tree *remappedvarp = id->decl_map->get (srcvar);
4578 if (!remappedvarp)
4579 return;
4581 if (!VAR_P (*remappedvarp))
4582 return;
4584 if (*remappedvarp == id->retvar)
4585 return;
4587 tree tvar = target_for_debug_bind (*remappedvarp);
4588 if (!tvar)
4589 return;
4591 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4592 id->call_stmt);
4593 gimple_seq_add_stmt (bindings, stmt);
4596 /* For each inlined variable for which we may have debug bind stmts,
4597 add before GSI a final debug stmt resetting it, marking the end of
4598 its life, so that var-tracking knows it doesn't have to compute
4599 further locations for it. */
4601 static inline void
4602 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4604 tree var;
4605 unsigned ix;
4606 gimple_seq bindings = NULL;
4608 if (!gimple_in_ssa_p (id->src_cfun))
4609 return;
4611 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4612 return;
4614 for (var = DECL_ARGUMENTS (id->src_fn);
4615 var; var = DECL_CHAIN (var))
4616 reset_debug_binding (id, var, &bindings);
4618 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4619 reset_debug_binding (id, var, &bindings);
4621 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4624 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
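/* A rough sketch of the transformation (illustrative, not an actual
   dump), assuming "int foo (int x) { return x + 1; }":

     before:                after inlining foo:

       a_1 = foo (b_2);       x_3 = b_2;            <- parameter setup
       use (a_1);             retval_4 = x_3 + 1;   <- copied body
                              a_1 = retval_4;       <- call replaced
                              use (a_1);

   The block containing the call is split, the callee body is copied in
   between, and the call statement is finally replaced by an assignment
   from the return variable (or simply removed when the result is
   unused).  */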
4626 static bool
4627 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4629 tree use_retvar;
4630 tree fn;
4631 hash_map<tree, tree> *dst;
4632 hash_map<tree, tree> *st = NULL;
4633 tree return_slot;
4634 tree modify_dest;
4635 struct cgraph_edge *cg_edge;
4636 cgraph_inline_failed_t reason;
4637 basic_block return_block;
4638 edge e;
4639 gimple_stmt_iterator gsi, stmt_gsi;
4640 bool successfully_inlined = false;
4641 bool purge_dead_abnormal_edges;
4642 gcall *call_stmt;
4643 unsigned int prop_mask, src_properties;
4644 struct function *dst_cfun;
4645 tree simduid;
4646 use_operand_p use;
4647 gimple *simtenter_stmt = NULL;
4648 vec<tree> *simtvars_save;
4650 /* The gimplifier uses input_location in too many places, such as
4651 internal_get_tmp_var (). */
4652 location_t saved_location = input_location;
4653 input_location = gimple_location (stmt);
4655 /* From here on, we're only interested in CALL_EXPRs. */
4656 call_stmt = dyn_cast <gcall *> (stmt);
4657 if (!call_stmt)
4658 goto egress;
4660 cg_edge = id->dst_node->get_edge (stmt);
4661 gcc_checking_assert (cg_edge);
4662 /* First, see if we can figure out what function is being called.
4663 If we cannot, then there is no hope of inlining the function. */
4664 if (cg_edge->indirect_unknown_callee)
4665 goto egress;
4666 fn = cg_edge->callee->decl;
4667 gcc_checking_assert (fn);
4669 /* If FN is a declaration of a function in a nested scope that was
4670 globally declared inline, we don't set its DECL_INITIAL.
4671 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4672 C++ front-end uses it for cdtors to refer to their internal
4673 declarations, that are not real functions. Fortunately those
4674 don't have trees to be saved, so we can tell by checking their
4675 gimple_body. */
4676 if (!DECL_INITIAL (fn)
4677 && DECL_ABSTRACT_ORIGIN (fn)
4678 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4679 fn = DECL_ABSTRACT_ORIGIN (fn);
4681 /* Don't try to inline functions that are not well-suited to inlining. */
4682 if (cg_edge->inline_failed)
4684 reason = cg_edge->inline_failed;
4685 /* If this call was originally indirect, we do not want to emit any
4686 inlining related warnings or sorry messages because there are no
4687 guarantees regarding those. */
4688 if (cg_edge->indirect_inlining_edge)
4689 goto egress;
4691 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4692 /* For extern inline functions that get redefined we have always
4693 silently ignored the always_inline flag. Better behavior would
4694 be to be able to keep both bodies and use the extern inline body
4695 for inlining, but we can't do that because frontends overwrite
4696 the body. */
4697 && !cg_edge->callee->redefined_extern_inline
4698 /* During early inline pass, report only when optimization is
4699 not turned on. */
4700 && (symtab->global_info_ready
4701 || !optimize
4702 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4703 /* PR 20090218-1_0.c. Body can be provided by another module. */
4704 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4706 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4707 cgraph_inline_failed_string (reason));
4708 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4709 inform (gimple_location (stmt), "called from here");
4710 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4711 inform (DECL_SOURCE_LOCATION (cfun->decl),
4712 "called from this function");
4714 else if (warn_inline
4715 && DECL_DECLARED_INLINE_P (fn)
4716 && !DECL_NO_INLINE_WARNING_P (fn)
4717 && !DECL_IN_SYSTEM_HEADER (fn)
4718 && reason != CIF_UNSPECIFIED
4719 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4720 /* Do not warn about not inlined recursive calls. */
4721 && !cg_edge->recursive_p ()
4722 /* Avoid warnings during early inline pass. */
4723 && symtab->global_info_ready)
4725 auto_diagnostic_group d;
4726 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4727 fn, _(cgraph_inline_failed_string (reason))))
4729 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4730 inform (gimple_location (stmt), "called from here");
4731 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4732 inform (DECL_SOURCE_LOCATION (cfun->decl),
4733 "called from this function");
4736 goto egress;
4738 id->src_node = cg_edge->callee;
4740 /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4741 and redirect to the function being thunked. */
4742 if (id->src_node->thunk.thunk_p)
4744 cgraph_edge *edge;
4745 tree virtual_offset = NULL;
4746 profile_count count = cg_edge->count;
4747 tree op;
4748 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4750 cg_edge->remove ();
4751 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4752 gimple_uid (stmt),
4753 profile_count::one (),
4754 profile_count::one (),
4755 true);
4756 edge->count = count;
4757 if (id->src_node->thunk.virtual_offset_p)
4758 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4759 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4760 NULL);
4761 gsi_insert_before (&iter, gimple_build_assign (op,
4762 gimple_call_arg (stmt, 0)),
4763 GSI_NEW_STMT);
4764 gcc_assert (id->src_node->thunk.this_adjusting);
4765 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4766 virtual_offset, id->src_node->thunk.indirect_offset);
4768 gimple_call_set_arg (stmt, 0, op);
4769 gimple_call_set_fndecl (stmt, edge->callee->decl);
4770 update_stmt (stmt);
4771 id->src_node->remove ();
4772 expand_call_inline (bb, stmt, id);
4773 maybe_remove_unused_call_args (cfun, stmt);
4774 return true;
4776 fn = cg_edge->callee->decl;
4777 cg_edge->callee->get_untransformed_body ();
4779 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4780 cg_edge->callee->verify ();
4782 /* We will be inlining this callee. */
4783 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4785 /* Update the caller's EH personality. */
4786 if (DECL_FUNCTION_PERSONALITY (fn))
4787 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4788 = DECL_FUNCTION_PERSONALITY (fn);
4790 /* Split the block before the GIMPLE_CALL. */
4791 stmt_gsi = gsi_for_stmt (stmt);
4792 gsi_prev (&stmt_gsi);
4793 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4794 bb = e->src;
4795 return_block = e->dest;
4796 remove_edge (e);
4798 /* If the GIMPLE_CALL was the last statement of BB, it may have
4799 been the source of abnormal edges. In this case, schedule
4800 the removal of dead abnormal edges. */
4801 gsi = gsi_start_bb (return_block);
4802 gsi_next (&gsi);
4803 purge_dead_abnormal_edges = gsi_end_p (gsi);
4805 stmt_gsi = gsi_start_bb (return_block);
4807 /* Build a block containing code to initialize the arguments, the
4808 actual inline expansion of the body, and a label for the return
4809 statements within the function to jump to. The type of the
4810 statement expression is the return type of the function call.
4811 ??? If the call does not have an associated block then we will
4812 remap all callee blocks to NULL, effectively dropping most of
4813 its debug information. This should only happen for calls to
4814 artificial decls inserted by the compiler itself. We need to
4815 either link the inlined blocks into the caller block tree or
4816 not refer to them in any way to not break GC for locations. */
4817 if (tree block = gimple_block (stmt))
4819 /* We do want to assign a BLOCK_SOURCE_LOCATION other than UNKNOWN_LOCATION
4820 so that inlined_function_outer_scope_p returns true on this BLOCK. */
4821 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4822 if (loc == UNKNOWN_LOCATION)
4823 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4824 if (loc == UNKNOWN_LOCATION)
4825 loc = BUILTINS_LOCATION;
4826 id->block = make_node (BLOCK);
4827 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4828 BLOCK_SOURCE_LOCATION (id->block) = loc;
4829 prepend_lexical_block (block, id->block);
4832 /* Local declarations will be replaced by their equivalents in this map. */
4833 st = id->decl_map;
4834 id->decl_map = new hash_map<tree, tree>;
4835 dst = id->debug_map;
4836 id->debug_map = NULL;
4837 if (flag_stack_reuse != SR_NONE)
4838 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4840 /* Record the function we are about to inline. */
4841 id->src_fn = fn;
4842 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4843 id->reset_location = DECL_IGNORED_P (fn);
4844 id->call_stmt = call_stmt;
4846 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4847 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4848 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4849 simtvars_save = id->dst_simt_vars;
4850 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4851 && (simduid = bb->loop_father->simduid) != NULL_TREE
4852 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4853 && single_imm_use (simduid, &use, &simtenter_stmt)
4854 && is_gimple_call (simtenter_stmt)
4855 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4856 vec_alloc (id->dst_simt_vars, 0);
4857 else
4858 id->dst_simt_vars = NULL;
4860 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4861 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4863 /* If the src function contains an IFN_VA_ARG, then so will the dst
4864 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4865 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4866 src_properties = id->src_cfun->curr_properties & prop_mask;
4867 if (src_properties != prop_mask)
4868 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4869 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4871 gcc_assert (!id->src_cfun->after_inlining);
4873 id->entry_bb = bb;
4874 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4876 gimple_stmt_iterator si = gsi_last_bb (bb);
4877 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4878 NOT_TAKEN),
4879 GSI_NEW_STMT);
4881 initialize_inlined_parameters (id, stmt, fn, bb);
4882 if (debug_nonbind_markers_p && debug_inline_points && id->block
4883 && inlined_function_outer_scope_p (id->block))
4885 gimple_stmt_iterator si = gsi_last_bb (bb);
4886 gsi_insert_after (&si, gimple_build_debug_inline_entry
4887 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4888 GSI_NEW_STMT);
4891 if (DECL_INITIAL (fn))
4893 if (gimple_block (stmt))
4895 tree *var;
4897 prepend_lexical_block (id->block,
4898 remap_blocks (DECL_INITIAL (fn), id));
4899 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4900 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4901 == NULL_TREE));
4902 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block,
4903 otherwise in DWARF the DW_TAG_formal_parameter DIEs will not be children of
4904 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4905 under it. The parameters can then be evaluated in the debugger,
4906 but don't show up in backtraces. */
4907 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4908 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4910 tree v = *var;
4911 *var = TREE_CHAIN (v);
4912 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4913 BLOCK_VARS (id->block) = v;
4915 else
4916 var = &TREE_CHAIN (*var);
4918 else
4919 remap_blocks_to_null (DECL_INITIAL (fn), id);
4922 /* Return statements in the function body will be replaced by jumps
4923 to the RET_LABEL. */
4924 gcc_assert (DECL_INITIAL (fn));
4925 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4927 /* Find the LHS to which the result of this call is assigned. */
4928 return_slot = NULL;
4929 if (gimple_call_lhs (stmt))
4931 modify_dest = gimple_call_lhs (stmt);
4933 /* The function which we are inlining might not return a value,
4934 in which case we should issue a warning that the function
4935 does not return a value. In that case the optimizers will
4936 see that the variable to which the value is assigned was not
4937 initialized. We do not want to issue a warning about that
4938 uninitialized variable. */
4939 if (DECL_P (modify_dest))
4940 TREE_NO_WARNING (modify_dest) = 1;
4942 if (gimple_call_return_slot_opt_p (call_stmt))
4944 return_slot = modify_dest;
4945 modify_dest = NULL;
4948 else
4949 modify_dest = NULL;
4951 /* If we are inlining a call to the C++ operator new, we don't want
4952 to use type based alias analysis on the return value. Otherwise
4953 we may get confused if the compiler sees that the inlined new
4954 function returns a pointer which was just deleted. See bug
4955 33407. */
4956 if (DECL_IS_OPERATOR_NEW_P (fn))
4958 return_slot = NULL;
4959 modify_dest = NULL;
4962 /* Declare the return variable for the function. */
4963 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4965 /* Add local vars in this inlined callee to caller. */
4966 add_local_variables (id->src_cfun, cfun, id);
4968 if (id->src_node->clone.performed_splits)
4970 /* Any calls from the inlined function will be turned into calls from the
4971 function we inline into. We must preserve the notes about how to split
4972 parameters so that such calls can be redirected/updated. */
4973 unsigned len = vec_safe_length (id->src_node->clone.performed_splits);
4974 for (unsigned i = 0; i < len; i++)
4976 ipa_param_performed_split ps
4977 = (*id->src_node->clone.performed_splits)[i];
4978 ps.dummy_decl = remap_decl (ps.dummy_decl, id);
4979 vec_safe_push (id->dst_node->clone.performed_splits, ps);
4982 if (flag_checking)
4984 len = vec_safe_length (id->dst_node->clone.performed_splits);
4985 for (unsigned i = 0; i < len; i++)
4987 ipa_param_performed_split *ps1
4988 = &(*id->dst_node->clone.performed_splits)[i];
4989 for (unsigned j = i + 1; j < len; j++)
4991 ipa_param_performed_split *ps2
4992 = &(*id->dst_node->clone.performed_splits)[j];
4993 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
4994 || ps1->unit_offset != ps2->unit_offset);
5000 if (dump_enabled_p ())
5002 char buf[128];
5003 snprintf (buf, sizeof(buf), "%4.2f",
5004 cg_edge->sreal_frequency ().to_double ());
5005 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5006 call_stmt,
5007 "Inlining %C to %C with frequency %s\n",
5008 id->src_node, id->dst_node, buf);
5009 if (dump_file && (dump_flags & TDF_DETAILS))
5011 id->src_node->dump (dump_file);
5012 id->dst_node->dump (dump_file);
5016 /* This is it. Duplicate the callee body. Assume callee is
5017 pre-gimplified. Note that we must not alter the caller
5018 function in any way before this point, as this CALL_EXPR may be
5019 a self-referential call; if we're calling ourselves, we need to
5020 duplicate our body before altering anything. */
5021 copy_body (id, bb, return_block, NULL);
5023 reset_debug_bindings (id, stmt_gsi);
5025 if (flag_stack_reuse != SR_NONE)
5026 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5027 if (!TREE_THIS_VOLATILE (p))
5029 tree *varp = id->decl_map->get (p);
5030 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
5032 tree clobber = build_clobber (TREE_TYPE (*varp));
5033 gimple *clobber_stmt;
5034 clobber_stmt = gimple_build_assign (*varp, clobber);
5035 gimple_set_location (clobber_stmt, gimple_location (stmt));
5036 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5040 /* Reset the escaped solution. */
5041 if (cfun->gimple_df)
5042 pt_solution_reset (&cfun->gimple_df->escaped);
5044 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
5045 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5047 size_t nargs = gimple_call_num_args (simtenter_stmt);
5048 vec<tree> *vars = id->dst_simt_vars;
5049 auto_vec<tree> newargs (nargs + vars->length ());
5050 for (size_t i = 0; i < nargs; i++)
5051 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5052 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5054 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5055 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5057 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5058 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5059 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5060 gsi_replace (&gsi, g, false);
5062 vec_free (id->dst_simt_vars);
5063 id->dst_simt_vars = simtvars_save;
5065 /* Clean up. */
5066 if (id->debug_map)
5068 delete id->debug_map;
5069 id->debug_map = dst;
5071 delete id->decl_map;
5072 id->decl_map = st;
5074 /* Unlink the call's virtual operands before replacing it. */
5075 unlink_stmt_vdef (stmt);
5076 if (gimple_vdef (stmt)
5077 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5078 release_ssa_name (gimple_vdef (stmt));
5080 /* If the inlined function returns a result that we care about,
5081 substitute the GIMPLE_CALL with an assignment of the return
5082 variable to the LHS of the call. That is, if STMT was
5083 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
5084 if (use_retvar && gimple_call_lhs (stmt))
5086 gimple *old_stmt = stmt;
5087 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5088 gimple_set_location (stmt, gimple_location (old_stmt));
5089 gsi_replace (&stmt_gsi, stmt, false);
5090 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5091 /* Append a clobber for id->retvar if easily possible. */
5092 if (flag_stack_reuse != SR_NONE
5093 && id->retvar
5094 && VAR_P (id->retvar)
5095 && id->retvar != return_slot
5096 && id->retvar != modify_dest
5097 && !TREE_THIS_VOLATILE (id->retvar)
5098 && !is_gimple_reg (id->retvar)
5099 && !stmt_ends_bb_p (stmt))
5101 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5102 gimple *clobber_stmt;
5103 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5104 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5105 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5108 else
5110 /* Handle the case of inlining a function with no return
5111 statement, which causes the return value to become undefined. */
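      /* E.g. (illustrative) inlining "int foo (void) { }" at
	 "a_1 = foo ();": the LHS a_1 is rewritten to the default
	 definition of its variable when one already exists, or it is
	 itself turned into a fresh default definition below, so it stays
	 undefined instead of referencing a removed call.  */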
5112 if (gimple_call_lhs (stmt)
5113 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5115 tree name = gimple_call_lhs (stmt);
5116 tree var = SSA_NAME_VAR (name);
5117 tree def = var ? ssa_default_def (cfun, var) : NULL;
5119 if (def)
5121 /* If the variable is used undefined, make this name
5122 undefined via a move. */
5123 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5124 gsi_replace (&stmt_gsi, stmt, true);
5126 else
5128 if (!var)
5130 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5131 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5133 /* Otherwise make this variable undefined. */
5134 gsi_remove (&stmt_gsi, true);
5135 set_ssa_default_def (cfun, var, name);
5136 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5139 /* Replace with a clobber for id->retvar. */
5140 else if (flag_stack_reuse != SR_NONE
5141 && id->retvar
5142 && VAR_P (id->retvar)
5143 && id->retvar != return_slot
5144 && id->retvar != modify_dest
5145 && !TREE_THIS_VOLATILE (id->retvar)
5146 && !is_gimple_reg (id->retvar))
5148 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5149 gimple *clobber_stmt;
5150 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5151 gimple_set_location (clobber_stmt, gimple_location (stmt));
5152 gsi_replace (&stmt_gsi, clobber_stmt, false);
5153 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5155 else
5156 gsi_remove (&stmt_gsi, true);
5159 if (purge_dead_abnormal_edges)
5161 gimple_purge_dead_eh_edges (return_block);
5162 gimple_purge_dead_abnormal_call_edges (return_block);
5165 /* If the value of the new expression is ignored, that's OK. We
5166 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5167 the equivalent inlined version either. */
5168 if (is_gimple_assign (stmt))
5170 gcc_assert (gimple_assign_single_p (stmt)
5171 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5172 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5175 id->add_clobbers_to_eh_landing_pads = 0;
5177 /* Output the inlining info for this abstract function, since it has been
5178 inlined. If we don't do this now, we can lose the information about the
5179 variables in the function when the blocks get blown away as soon as we
5180 remove the cgraph node. */
5181 if (gimple_block (stmt))
5182 (*debug_hooks->outlining_inline_function) (fn);
5184 /* Update callgraph if needed. */
5185 cg_edge->callee->remove ();
5187 id->block = NULL_TREE;
5188 id->retvar = NULL_TREE;
5189 successfully_inlined = true;
5191 egress:
5192 input_location = saved_location;
5193 return successfully_inlined;
5196 /* Expand call statements reachable from STMT_P.
5197 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5198 in a MODIFY_EXPR. */
5200 static bool
5201 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
5203 gimple_stmt_iterator gsi;
5204 bool inlined = false;
5206 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5208 gimple *stmt = gsi_stmt (gsi);
5209 gsi_prev (&gsi);
5211 if (is_gimple_call (stmt)
5212 && !gimple_call_internal_p (stmt))
5213 inlined |= expand_call_inline (bb, stmt, id);
5216 return inlined;
5220 /* Walk all basic blocks created after FIRST and try to fold every statement
5221 in the STATEMENTS pointer set. */
5223 static void
5224 fold_marked_statements (int first, hash_set<gimple *> *statements)
5226 for (; first < last_basic_block_for_fn (cfun); first++)
5227 if (BASIC_BLOCK_FOR_FN (cfun, first))
5229 gimple_stmt_iterator gsi;
5231 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5232 !gsi_end_p (gsi);
5233 gsi_next (&gsi))
5234 if (statements->contains (gsi_stmt (gsi)))
5236 gimple *old_stmt = gsi_stmt (gsi);
5237 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
5239 if (old_decl && fndecl_built_in_p (old_decl))
5241 /* Folding builtins can create multiple instructions,
5242 we need to look at all of them. */
5243 gimple_stmt_iterator i2 = gsi;
5244 gsi_prev (&i2);
5245 if (fold_stmt (&gsi))
5247 gimple *new_stmt;
5248 /* If a builtin at the end of a bb folded into nothing,
5249 the following loop won't work. */
5250 if (gsi_end_p (gsi))
5252 cgraph_update_edges_for_call_stmt (old_stmt,
5253 old_decl, NULL);
5254 break;
5256 if (gsi_end_p (i2))
5257 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5258 else
5259 gsi_next (&i2);
5260 while (1)
5262 new_stmt = gsi_stmt (i2);
5263 update_stmt (new_stmt);
5264 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5265 new_stmt);
5267 if (new_stmt == gsi_stmt (gsi))
5269 /* It is okay to check only for the very last
5270 of these statements. If it is a throwing
5271 statement nothing will change. If it isn't,
5272 this can remove EH edges. The only problematic
5273 case would be some intermediate stmt throwing
5274 while the last one does not; that would mean
5275 we'd have to split the block, which we can't
5276 do here and we'd lose anyway. And as builtins
5277 probably never throw, this all
5278 is moot anyway. */
5279 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5280 new_stmt))
5281 gimple_purge_dead_eh_edges (
5282 BASIC_BLOCK_FOR_FN (cfun, first));
5283 break;
5285 gsi_next (&i2);
5289 else if (fold_stmt (&gsi))
5291 /* Re-read the statement from GSI as fold_stmt() may
5292 have changed it. */
5293 gimple *new_stmt = gsi_stmt (gsi);
5294 update_stmt (new_stmt);
5296 if (is_gimple_call (old_stmt)
5297 || is_gimple_call (new_stmt))
5298 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5299 new_stmt);
5301 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5302 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
5303 first));
5309 /* Expand calls to inline functions in the body of FN. */
5311 unsigned int
5312 optimize_inline_calls (tree fn)
5314 copy_body_data id;
5315 basic_block bb;
5316 int last = n_basic_blocks_for_fn (cfun);
5317 bool inlined_p = false;
5319 /* Clear out ID. */
5320 memset (&id, 0, sizeof (id));
5322 id.src_node = id.dst_node = cgraph_node::get (fn);
5323 gcc_assert (id.dst_node->definition);
5324 id.dst_fn = fn;
5325 /* Or any functions that aren't finished yet. */
5326 if (current_function_decl)
5327 id.dst_fn = current_function_decl;
5329 id.copy_decl = copy_decl_maybe_to_var;
5330 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5331 id.transform_new_cfg = false;
5332 id.transform_return_to_modify = true;
5333 id.transform_parameter = true;
5334 id.transform_lang_insert_block = NULL;
5335 id.statements_to_fold = new hash_set<gimple *>;
5337 push_gimplify_context ();
5339 /* We make no attempts to keep dominance info up-to-date. */
5340 free_dominance_info (CDI_DOMINATORS);
5341 free_dominance_info (CDI_POST_DOMINATORS);
5343 /* Register specific gimple functions. */
5344 gimple_register_cfg_hooks ();
5346 /* Reach the trees by walking over the CFG, and note the
5347 enclosing basic-blocks in the call edges. */
5348 /* We walk the blocks going forward, because inlined function bodies
5349 will split id->current_basic_block, and the new blocks will
5350 follow it; we'll trudge through them, processing their CALL_EXPRs
5351 along the way. */
5352 FOR_EACH_BB_FN (bb, cfun)
5353 inlined_p |= gimple_expand_calls_inline (bb, &id);
5355 pop_gimplify_context (NULL);
5357 if (flag_checking)
5359 struct cgraph_edge *e;
5361 id.dst_node->verify ();
5363 /* Double check that we inlined everything we are supposed to inline. */
5364 for (e = id.dst_node->callees; e; e = e->next_callee)
5365 gcc_assert (e->inline_failed);
5368 /* Fold queued statements. */
5369 update_max_bb_count ();
5370 fold_marked_statements (last, id.statements_to_fold);
5371 delete id.statements_to_fold;
5373 gcc_assert (!id.debug_stmts.exists ());
5375 /* If we didn't inline into the function there is nothing to do. */
5376 if (!inlined_p)
5377 return 0;
5379 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5380 number_blocks (fn);
5382 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5384 if (flag_checking)
5385 id.dst_node->verify ();
5387 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5388 not possible yet - the IPA passes might make various functions not
5389 throw and they don't care to proactively update local EH info. This is
5390 done later in the fixup_cfg pass, which also executes the verification. */
5391 return (TODO_update_ssa
5392 | TODO_cleanup_cfg
5393 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5394 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5395 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5396 ? TODO_rebuild_frequencies : 0));
5399 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5401 tree
5402 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5404 enum tree_code code = TREE_CODE (*tp);
5405 enum tree_code_class cl = TREE_CODE_CLASS (code);
5407 /* We make copies of most nodes. */
5408 if (IS_EXPR_CODE_CLASS (cl)
5409 || code == TREE_LIST
5410 || code == TREE_VEC
5411 || code == TYPE_DECL
5412 || code == OMP_CLAUSE)
5414 /* Because the chain gets clobbered when we make a copy, we save it
5415 here. */
5416 tree chain = NULL_TREE, new_tree;
5418 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5419 chain = TREE_CHAIN (*tp);
5421 /* Copy the node. */
5422 new_tree = copy_node (*tp);
5424 *tp = new_tree;
5426 /* Now, restore the chain, if appropriate. That will cause
5427 walk_tree to walk into the chain as well. */
5428 if (code == PARM_DECL
5429 || code == TREE_LIST
5430 || code == OMP_CLAUSE)
5431 TREE_CHAIN (*tp) = chain;
5433 /* For now, we don't update BLOCKs when we make copies. So, we
5434 have to nullify all BIND_EXPRs. */
5435 if (TREE_CODE (*tp) == BIND_EXPR)
5436 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5438 else if (code == CONSTRUCTOR)
5440 /* CONSTRUCTOR nodes need special handling because
5441 we need to duplicate the vector of elements. */
5442 tree new_tree;
5444 new_tree = copy_node (*tp);
5445 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5446 *tp = new_tree;
5448 else if (code == STATEMENT_LIST)
5449 /* We used to just abort on STATEMENT_LIST, but we can run into them
5450 with statement-expressions (c++/40975). */
5451 copy_statement_list (tp);
5452 else if (TREE_CODE_CLASS (code) == tcc_type)
5453 *walk_subtrees = 0;
5454 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5455 *walk_subtrees = 0;
5456 else if (TREE_CODE_CLASS (code) == tcc_constant)
5457 *walk_subtrees = 0;
5458 return NULL_TREE;
5461 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5462 information indicating to what new SAVE_EXPR this one should be mapped,
5463 use that one. Otherwise, create a new node and enter it in ST. FN is
5464 the function into which the copy will be placed. */
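/* For example, if the tree being copied contains the same SAVE_EXPR
   twice, the first visit copies the node and records the copy in ST;
   the second visit finds it there and reuses the copy, preserving the
   evaluate-once semantics of SAVE_EXPR in the duplicated tree.  */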
5466 static void
5467 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5469 tree *n;
5470 tree t;
5472 /* See if we already encountered this SAVE_EXPR. */
5473 n = st->get (*tp);
5475 /* If we didn't already remap this SAVE_EXPR, do so now. */
5476 if (!n)
5478 t = copy_node (*tp);
5480 /* Remember this SAVE_EXPR. */
5481 st->put (*tp, t);
5482 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5483 st->put (t, t);
5485 else
5487 /* We've already walked into this SAVE_EXPR; don't do it again. */
5488 *walk_subtrees = 0;
5489 t = *n;
5492 /* Replace this SAVE_EXPR with the copy. */
5493 *tp = t;
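/* Illustrative effect on ST (a sketch): after the first visit of a
   SAVE_EXPR S the map contains both S -> S' and S' -> S', so a later
   walk over a tree that already holds the copy S' leaves it alone.  */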
5496 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5497 label, copies the declaration and enters it in the decl map in DATA (which
5498 is really a 'copy_body_data *'). */
5500 static tree
5501 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5502 bool *handled_ops_p ATTRIBUTE_UNUSED,
5503 struct walk_stmt_info *wi)
5505 copy_body_data *id = (copy_body_data *) wi->info;
5506 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5508 if (stmt)
5510 tree decl = gimple_label_label (stmt);
5512 /* Copy the decl and remember the copy. */
5513 insert_decl_map (id, decl, id->copy_decl (decl, id));
5516 return NULL_TREE;
5519 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5520 struct walk_stmt_info *wi);
5522 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5523 Using the decl map of the copy_body_data passed in via WI->info,
5524 remaps all local declarations to appropriate replacements in gimple
5525 operands. */
5527 static tree
5528 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5530 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5531 copy_body_data *id = (copy_body_data *) wi->info;
5532 hash_map<tree, tree> *st = id->decl_map;
5533 tree *n;
5534 tree expr = *tp;
5536 /* For recursive invocations this is no longer the LHS itself. */
5537 bool is_lhs = wi->is_lhs;
5538 wi->is_lhs = false;
5540 if (TREE_CODE (expr) == SSA_NAME)
5542 *tp = remap_ssa_name (*tp, id);
5543 *walk_subtrees = 0;
5544 if (is_lhs)
5545 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5547 /* Only a local declaration (variable or label). */
5548 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5549 || TREE_CODE (expr) == LABEL_DECL)
5551 /* Lookup the declaration. */
5552 n = st->get (expr);
5554 /* If it's there, remap it. */
5555 if (n)
5556 *tp = *n;
5557 *walk_subtrees = 0;
5559 else if (TREE_CODE (expr) == STATEMENT_LIST
5560 || TREE_CODE (expr) == BIND_EXPR
5561 || TREE_CODE (expr) == SAVE_EXPR)
5562 gcc_unreachable ();
5563 else if (TREE_CODE (expr) == TARGET_EXPR)
5565 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5566 It's OK for this to happen if it was part of a subtree that
5567 isn't immediately expanded, such as operand 2 of another
5568 TARGET_EXPR. */
5569 if (!TREE_OPERAND (expr, 1))
5571 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5572 TREE_OPERAND (expr, 3) = NULL_TREE;
5575 else if (TREE_CODE (expr) == OMP_CLAUSE)
5577 /* Before the omplower pass completes, some OMP clauses can contain
5578 sequences that are neither copied by gimple_seq_copy nor walked by
5579 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5580 in those situations, we have to copy and process them explicitly. */
5582 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5584 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5585 seq = duplicate_remap_omp_clause_seq (seq, wi);
5586 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5588 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5590 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5591 seq = duplicate_remap_omp_clause_seq (seq, wi);
5592 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5594 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5596 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5597 seq = duplicate_remap_omp_clause_seq (seq, wi);
5598 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5599 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5600 seq = duplicate_remap_omp_clause_seq (seq, wi);
5601 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5605 /* Keep iterating. */
5606 return NULL_TREE;
5610 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5611 Using the decl map of the copy_body_data passed in via WI->info,
5612 remaps all local declarations to appropriate replacements in gimple
5613 statements. */
5615 static tree
5616 replace_locals_stmt (gimple_stmt_iterator *gsip,
5617 bool *handled_ops_p ATTRIBUTE_UNUSED,
5618 struct walk_stmt_info *wi)
5620 copy_body_data *id = (copy_body_data *) wi->info;
5621 gimple *gs = gsi_stmt (*gsip);
5623 if (gbind *stmt = dyn_cast <gbind *> (gs))
5625 tree block = gimple_bind_block (stmt);
5627 if (block)
5629 remap_block (&block, id);
5630 gimple_bind_set_block (stmt, block);
5633 /* This will remap a lot of the same decls again, but this should be
5634 harmless. */
5635 if (gimple_bind_vars (stmt))
5637 tree old_var, decls = gimple_bind_vars (stmt);
5639 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5640 if (!can_be_nonlocal (old_var, id)
5641 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5642 remap_decl (old_var, id);
5644 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5645 id->prevent_decl_creation_for_types = true;
5646 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5647 id->prevent_decl_creation_for_types = false;
5651 /* Keep iterating. */
5652 return NULL_TREE;
5655 /* Create a copy of SEQ and remap all decls in it. */
5657 static gimple_seq
5658 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5660 if (!seq)
5661 return NULL;
5663 /* If there are any labels in OMP sequences, they can only be referred to
5664 from within the sequence itself, so we can do both steps (marking local labels and remapping) here. */
5665 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5666 gimple_seq copy = gimple_seq_copy (seq);
5667 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5668 return copy;
5671 /* Copies everything in SEQ and replaces variables and labels local to
5672 current_function_decl. */
5674 gimple_seq
5675 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5677 copy_body_data id;
5678 struct walk_stmt_info wi;
5679 gimple_seq copy;
5681 /* There's nothing to do for NULL_TREE. */
5682 if (seq == NULL)
5683 return seq;
5685 /* Set up ID. */
5686 memset (&id, 0, sizeof (id));
5687 id.src_fn = current_function_decl;
5688 id.dst_fn = current_function_decl;
5689 id.src_cfun = cfun;
5690 id.decl_map = new hash_map<tree, tree>;
5691 id.debug_map = NULL;
5693 id.copy_decl = copy_decl_no_change;
5694 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5695 id.transform_new_cfg = false;
5696 id.transform_return_to_modify = false;
5697 id.transform_parameter = false;
5698 id.transform_lang_insert_block = NULL;
5700 /* Walk the tree once to find local labels. */
5701 memset (&wi, 0, sizeof (wi));
5702 hash_set<tree> visited;
5703 wi.info = &id;
5704 wi.pset = &visited;
5705 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5707 copy = gimple_seq_copy (seq);
5709 /* Walk the copy, remapping decls. */
5710 memset (&wi, 0, sizeof (wi));
5711 wi.info = &id;
5712 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5714 /* Clean up. */
5715 delete id.decl_map;
5716 if (id.debug_map)
5717 delete id.debug_map;
5718 if (id.dependence_map)
5720 delete id.dependence_map;
5721 id.dependence_map = NULL;
5724 return copy;
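/* Illustrative usage (a sketch): duplicating a statement sequence SEQ so
   that the copy gets its own local declarations and labels:

     gimple_seq dup = copy_gimple_seq_and_replace_locals (seq);

   DUP can then be placed elsewhere in current_function_decl without the
   two sequences sharing LABEL_DECLs or automatic VAR_DECLs.  */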
5728 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5730 static tree
5731 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5733 if (*tp == data)
5734 return (tree) data;
5735 else
5736 return NULL;
5739 DEBUG_FUNCTION bool
5740 debug_find_tree (tree top, tree search)
5742 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
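/* Illustrative use from the debugger (a sketch):

     (gdb) call debug_find_tree (top, search)

   prints true iff SEARCH occurs somewhere within TOP.  */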
5746 /* Declare the variables created by the inliner. Add all the variables in
5747 VARS to BLOCK. */
5749 static void
5750 declare_inline_vars (tree block, tree vars)
5752 tree t;
5753 for (t = vars; t; t = DECL_CHAIN (t))
5755 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5756 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5757 add_local_decl (cfun, t);
5760 if (block)
5761 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5764 /* Finish up COPY, a copy of DECL. The DECL originally was in ID->src_fn,
5765 but the copy will be part of ID->dst_fn. Set up debug information, context
5766 and other common bookkeeping on the copy. */
5768 tree
5769 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5771 /* Don't generate debug information for the copy if we wouldn't have
5772 generated it for the original either. */
5773 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5774 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5776 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5777 declaration inspired this copy. */
5778 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5780 /* The new variable/label has no RTL, yet. */
5781 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5782 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5783 SET_DECL_RTL (copy, 0);
5784 /* For vector typed decls make sure to update DECL_MODE according
5785 to the new function context. */
5786 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5787 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5789 /* These args would always appear unused, if not for this. */
5790 TREE_USED (copy) = 1;
5792 /* Set the context for the new declaration. */
5793 if (!DECL_CONTEXT (decl))
5794 /* Globals stay global. */
5796 else if (DECL_CONTEXT (decl) != id->src_fn)
5797 /* Things that weren't in the scope of the function we're inlining
5798 from aren't in the scope we're inlining to, either. */
5800 else if (TREE_STATIC (decl))
5801 /* Function-scoped static variables should stay in the original
5802 function. */
5804 else
5806 /* Ordinary automatic local variables are now in the scope of the
5807 new function. */
5808 DECL_CONTEXT (copy) = id->dst_fn;
5809 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5811 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5812 DECL_ATTRIBUTES (copy)
5813 = tree_cons (get_identifier ("omp simt private"), NULL,
5814 DECL_ATTRIBUTES (copy));
5815 id->dst_simt_vars->safe_push (copy);
5819 return copy;
5822 /* Create a new VAR_DECL that is identical in all respects to DECL except
5823 that DECL may be a PARM_DECL or a RESULT_DECL while the copy is a VAR_DECL. The original
5824 DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
5826 tree
5827 copy_decl_to_var (tree decl, copy_body_data *id)
5829 tree copy, type;
5831 gcc_assert (TREE_CODE (decl) == PARM_DECL
5832 || TREE_CODE (decl) == RESULT_DECL);
5834 type = TREE_TYPE (decl);
5836 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5837 VAR_DECL, DECL_NAME (decl), type);
5838 if (DECL_PT_UID_SET_P (decl))
5839 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5840 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5841 TREE_READONLY (copy) = TREE_READONLY (decl);
5842 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5843 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5845 return copy_decl_for_dup_finish (id, decl, copy);
5848 /* Like copy_decl_to_var, but create a return slot object instead of a
5849 pointer variable for return by invisible reference. */
5851 static tree
5852 copy_result_decl_to_var (tree decl, copy_body_data *id)
5854 tree copy, type;
5856 gcc_assert (TREE_CODE (decl) == PARM_DECL
5857 || TREE_CODE (decl) == RESULT_DECL);
5859 type = TREE_TYPE (decl);
5860 if (DECL_BY_REFERENCE (decl))
5861 type = TREE_TYPE (type);
5863 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5864 VAR_DECL, DECL_NAME (decl), type);
5865 if (DECL_PT_UID_SET_P (decl))
5866 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5867 TREE_READONLY (copy) = TREE_READONLY (decl);
5868 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5869 if (!DECL_BY_REFERENCE (decl))
5871 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5872 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5875 return copy_decl_for_dup_finish (id, decl, copy);
5878 tree
5879 copy_decl_no_change (tree decl, copy_body_data *id)
5881 tree copy;
5883 copy = copy_node (decl);
5885 /* The COPY is not abstract; it will be generated in DST_FN. */
5886 DECL_ABSTRACT_P (copy) = false;
5887 lang_hooks.dup_lang_specific_decl (copy);
5889 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5890 been taken; it's for internal bookkeeping in expand_goto_internal. */
5891 if (TREE_CODE (copy) == LABEL_DECL)
5893 TREE_ADDRESSABLE (copy) = 0;
5894 LABEL_DECL_UID (copy) = -1;
5897 return copy_decl_for_dup_finish (id, decl, copy);
5900 static tree
5901 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5903 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5904 return copy_decl_to_var (decl, id);
5905 else
5906 return copy_decl_no_change (decl, id);
5909 /* Return a copy of the function's argument tree without any modifications. */
5911 static tree
5912 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
5914 tree arg, *parg;
5915 tree new_parm = NULL;
5917 parg = &new_parm;
5918 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
5920 tree new_tree = remap_decl (arg, id);
5921 if (TREE_CODE (new_tree) != PARM_DECL)
5922 new_tree = id->copy_decl (arg, id);
5923 lang_hooks.dup_lang_specific_decl (new_tree);
5924 *parg = new_tree;
5925 parg = &DECL_CHAIN (new_tree);
5927 return new_parm;
5930 /* Return a copy of the function's static chain. */
5931 static tree
5932 copy_static_chain (tree static_chain, copy_body_data * id)
5934 tree *chain_copy, *pvar;
5936 chain_copy = &static_chain;
5937 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5939 tree new_tree = remap_decl (*pvar, id);
5940 lang_hooks.dup_lang_specific_decl (new_tree);
5941 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5942 *pvar = new_tree;
5944 return static_chain;
5947 /* Return true if the function is allowed to be versioned.
5948 This is a guard for the versioning functionality. */
5950 bool
5951 tree_versionable_function_p (tree fndecl)
5953 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5954 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
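/* Illustrative guard (a sketch): IPA passes typically check this
   predicate before creating a clone, e.g.

     if (tree_versionable_function_p (node->decl))
       ... create and materialize the new version ...

   so functions carrying the "noclone" attribute or containing
   constructs rejected by copy_forbidden are never versioned.  */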
5957 /* Update clone info after duplication. */
5959 static void
5960 update_clone_info (copy_body_data * id)
5962 vec<ipa_param_performed_split, va_gc> *cur_performed_splits
5963 = id->dst_node->clone.performed_splits;
5964 if (cur_performed_splits)
5966 unsigned len = cur_performed_splits->length ();
5967 for (unsigned i = 0; i < len; i++)
5969 ipa_param_performed_split *ps = &(*cur_performed_splits)[i];
5970 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
5974 struct cgraph_node *node;
5975 if (!id->dst_node->clones)
5976 return;
5977 for (node = id->dst_node->clones; node != id->dst_node;)
5979 /* First update replace maps to match the new body. */
5980 if (node->clone.tree_map)
5982 unsigned int i;
5983 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5985 struct ipa_replace_map *replace_info;
5986 replace_info = (*node->clone.tree_map)[i];
5987 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5990 if (node->clone.performed_splits)
5992 unsigned len = vec_safe_length (node->clone.performed_splits);
5993 for (unsigned i = 0; i < len; i++)
5995 ipa_param_performed_split *ps
5996 = &(*node->clone.performed_splits)[i];
5997 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6000 if (unsigned len = vec_safe_length (cur_performed_splits))
6002 /* We do not want to add the current performed splits when we are saving
6003 a copy of the function body for later use during inlining; that would just
6004 duplicate all entries. So have a look at whether anything
6005 referring to the first dummy_decl is already present. */
6006 unsigned dst_len = vec_safe_length (node->clone.performed_splits);
6007 ipa_param_performed_split *first = &(*cur_performed_splits)[0];
6008 for (unsigned i = 0; i < dst_len; i++)
6009 if ((*node->clone.performed_splits)[i].dummy_decl
6010 == first->dummy_decl)
6012 len = 0;
6013 break;
6016 for (unsigned i = 0; i < len; i++)
6017 vec_safe_push (node->clone.performed_splits,
6018 (*cur_performed_splits)[i]);
6019 if (flag_checking)
6021 for (unsigned i = 0; i < dst_len; i++)
6023 ipa_param_performed_split *ps1
6024 = &(*node->clone.performed_splits)[i];
6025 for (unsigned j = i + 1; j < dst_len; j++)
6027 ipa_param_performed_split *ps2
6028 = &(*node->clone.performed_splits)[j];
6029 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
6030 || ps1->unit_offset != ps2->unit_offset);
6036 if (node->clones)
6037 node = node->clones;
6038 else if (node->next_sibling_clone)
6039 node = node->next_sibling_clone;
6040 else
6042 while (node != id->dst_node && !node->next_sibling_clone)
6043 node = node->clone_of;
6044 if (node != id->dst_node)
6045 node = node->next_sibling_clone;
6050 /* Create a copy of a function's tree.
6051 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6052 of the original function and the new copied function
6053 respectively. In case we want to replace a DECL
6054 tree with another tree while duplicating the function's
6055 body, TREE_MAP represents the mapping between these
6056 trees. If UPDATE_CLONES is set, the call_stmt fields
6057 of edges of clones of the function will be updated.
6059 If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype (i.e.
6060 the function parameters and return value) should be modified.
6061 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
6062 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
6064 void
6065 tree_function_versioning (tree old_decl, tree new_decl,
6066 vec<ipa_replace_map *, va_gc> *tree_map,
6067 ipa_param_adjustments *param_adjustments,
6068 bool update_clones, bitmap blocks_to_copy,
6069 basic_block new_entry)
6071 struct cgraph_node *old_version_node;
6072 struct cgraph_node *new_version_node;
6073 copy_body_data id;
6074 tree p;
6075 unsigned i;
6076 struct ipa_replace_map *replace_info;
6077 basic_block old_entry_block, bb;
6078 auto_vec<gimple *, 10> init_stmts;
6079 tree vars = NULL_TREE;
6081 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6082 && TREE_CODE (new_decl) == FUNCTION_DECL);
6083 DECL_POSSIBLY_INLINED (old_decl) = 1;
6085 old_version_node = cgraph_node::get (old_decl);
6086 gcc_checking_assert (old_version_node);
6087 new_version_node = cgraph_node::get (new_decl);
6088 gcc_checking_assert (new_version_node);
6090 /* Copy over debug args. */
6091 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6093 vec<tree, va_gc> **new_debug_args, **old_debug_args;
6094 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6095 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6096 old_debug_args = decl_debug_args_lookup (old_decl);
6097 if (old_debug_args)
6099 new_debug_args = decl_debug_args_insert (new_decl);
6100 *new_debug_args = vec_safe_copy (*old_debug_args);
6104 /* Output the inlining info for this abstract function, since it has been
6105 inlined. If we don't do this now, we can lose the information about the
6106 variables in the function when the blocks get blown away as soon as we
6107 remove the cgraph node. */
6108 (*debug_hooks->outlining_inline_function) (old_decl);
6110 DECL_ARTIFICIAL (new_decl) = 1;
6111 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6112 if (DECL_ORIGIN (old_decl) == old_decl)
6113 old_version_node->used_as_abstract_origin = true;
6114 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6116 /* Prepare the data structures for the tree copy. */
6117 memset (&id, 0, sizeof (id));
6119 /* Generate a new name for the new version. */
6120 id.statements_to_fold = new hash_set<gimple *>;
6122 id.decl_map = new hash_map<tree, tree>;
6123 id.debug_map = NULL;
6124 id.src_fn = old_decl;
6125 id.dst_fn = new_decl;
6126 id.src_node = old_version_node;
6127 id.dst_node = new_version_node;
6128 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6129 id.blocks_to_copy = blocks_to_copy;
6131 id.copy_decl = copy_decl_no_change;
6132 id.transform_call_graph_edges
6133 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6134 id.transform_new_cfg = true;
6135 id.transform_return_to_modify = false;
6136 id.transform_parameter = false;
6137 id.transform_lang_insert_block = NULL;
6139 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
6140 (DECL_STRUCT_FUNCTION (old_decl));
6141 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6142 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6143 initialize_cfun (new_decl, old_decl,
6144 new_entry ? new_entry->count : old_entry_block->count);
6145 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6146 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6147 = id.src_cfun->gimple_df->ipa_pta;
6149 /* Copy the function's static chain. */
6150 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6151 if (p)
6152 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6153 = copy_static_chain (p, &id);
6155 auto_vec<int, 16> new_param_indices;
6156 ipa_param_adjustments *old_param_adjustments
6157 = old_version_node->clone.param_adjustments;
6158 if (old_param_adjustments)
6159 old_param_adjustments->get_updated_indices (&new_param_indices);
6161 /* If there's a tree_map, prepare for substitution. */
6162 if (tree_map)
6163 for (i = 0; i < tree_map->length (); i++)
6165 gimple *init;
6166 replace_info = (*tree_map)[i];
6168 int p = replace_info->parm_num;
6169 if (old_param_adjustments)
6170 p = new_param_indices[p];
6172 tree parm;
6173 tree req_type, new_type;
6175 for (parm = DECL_ARGUMENTS (old_decl); p;
6176 parm = DECL_CHAIN (parm))
6177 p--;
6178 tree old_tree = parm;
6179 req_type = TREE_TYPE (parm);
6180 new_type = TREE_TYPE (replace_info->new_tree);
6181 if (!useless_type_conversion_p (req_type, new_type))
6183 if (fold_convertible_p (req_type, replace_info->new_tree))
6184 replace_info->new_tree
6185 = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
6186 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
6187 replace_info->new_tree
6188 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
6189 replace_info->new_tree);
6190 else
6192 if (dump_file)
6194 fprintf (dump_file, " const ");
6195 print_generic_expr (dump_file,
6196 replace_info->new_tree);
6197 fprintf (dump_file,
6198 " can't be converted to param ");
6199 print_generic_expr (dump_file, parm);
6200 fprintf (dump_file, "\n");
6202 old_tree = NULL;
6206 if (old_tree)
6208 init = setup_one_parameter (&id, old_tree, replace_info->new_tree,
6209 id.src_fn, NULL, &vars);
6210 if (init)
6211 init_stmts.safe_push (init);
6215 ipa_param_body_adjustments *param_body_adjs = NULL;
6216 if (param_adjustments)
6218 param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6219 new_decl, old_decl,
6220 &id, &vars, tree_map);
6221 id.param_body_adjs = param_body_adjs;
6222 DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6224 else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6225 DECL_ARGUMENTS (new_decl)
6226 = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6228 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6229 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6231 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6233 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6234 /* Add local vars. */
6235 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6237 if (DECL_RESULT (old_decl) == NULL_TREE)
6239 else if (param_adjustments && param_adjustments->m_skip_return
6240 && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6242 tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6243 &id);
6244 declare_inline_vars (NULL, resdecl_repl);
6245 insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6247 DECL_RESULT (new_decl)
6248 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6249 RESULT_DECL, NULL_TREE, void_type_node);
6250 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6251 DECL_IS_MALLOC (new_decl) = false;
6252 cfun->returns_struct = 0;
6253 cfun->returns_pcc_struct = 0;
6255 else
6257 tree old_name;
6258 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6259 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6260 if (gimple_in_ssa_p (id.src_cfun)
6261 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6262 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6264 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6265 insert_decl_map (&id, old_name, new_name);
6266 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6267 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6271 /* Set up the destination function's loop tree. */
6272 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6274 cfun->curr_properties &= ~PROP_loops;
6275 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6276 cfun->curr_properties |= PROP_loops;
6279 /* Copy the function's body. */
6280 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6281 new_entry);
6283 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6284 number_blocks (new_decl);
6286 /* We want to create the BB unconditionally, so that the addition of
6287 debug stmts doesn't affect BB count, which may in the end cause
6288 codegen differences. */
6289 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6290 while (init_stmts.length ())
6291 insert_init_stmt (&id, bb, init_stmts.pop ());
6292 update_clone_info (&id);
6294 /* Remap the nonlocal_goto_save_area, if any. */
6295 if (cfun->nonlocal_goto_save_area)
6297 struct walk_stmt_info wi;
6299 memset (&wi, 0, sizeof (wi));
6300 wi.info = &id;
6301 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6304 /* Clean up. */
6305 delete id.decl_map;
6306 if (id.debug_map)
6307 delete id.debug_map;
6308 free_dominance_info (CDI_DOMINATORS);
6309 free_dominance_info (CDI_POST_DOMINATORS);
6311 update_max_bb_count ();
6312 fold_marked_statements (0, id.statements_to_fold);
6313 delete id.statements_to_fold;
6314 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6315 if (id.dst_node->definition)
6316 cgraph_edge::rebuild_references ();
6317 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6319 calculate_dominance_info (CDI_DOMINATORS);
6320 fix_loop_structure (NULL);
6322 update_ssa (TODO_update_ssa);
6324 /* After partial cloning we need to rescale frequencies, so they are
6325 within proper range in the cloned function. */
6326 if (new_entry)
6328 struct cgraph_edge *e;
6329 rebuild_frequencies ();
6331 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6332 for (e = new_version_node->callees; e; e = e->next_callee)
6334 basic_block bb = gimple_bb (e->call_stmt);
6335 e->count = bb->count;
6337 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6339 basic_block bb = gimple_bb (e->call_stmt);
6340 e->count = bb->count;
6344 if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6346 vec<tree, va_gc> **debug_args = NULL;
6347 unsigned int len = 0;
6348 unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6350 for (i = 0; i < reset_len; i++)
6352 tree parm = param_body_adjs->m_reset_debug_decls[i];
6353 gcc_assert (is_gimple_reg (parm));
6354 tree ddecl;
6356 if (debug_args == NULL)
6358 debug_args = decl_debug_args_insert (new_decl);
6359 len = vec_safe_length (*debug_args);
6361 ddecl = make_node (DEBUG_EXPR_DECL);
6362 DECL_ARTIFICIAL (ddecl) = 1;
6363 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6364 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6365 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6366 vec_safe_push (*debug_args, ddecl);
6368 if (debug_args != NULL)
6370 /* On the callee side, add
6371 DEBUG D#Y s=> parm
6372 DEBUG var => D#Y
6373 stmts to the first bb where var is a VAR_DECL created for the
6374 optimized away parameter in DECL_INITIAL block. This hints
6375 in the debug info that var (whose DECL_ORIGIN is the parm
6376 PARM_DECL) is optimized away, but could be looked up at the
6377 call site as value of D#X there. */
6378 tree vexpr;
6379 gimple_stmt_iterator cgsi
6380 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6381 gimple *def_temp;
6382 tree var = vars;
6383 i = vec_safe_length (*debug_args);
6386 i -= 2;
6387 while (var != NULL_TREE
6388 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6389 var = TREE_CHAIN (var);
6390 if (var == NULL_TREE)
6391 break;
6392 vexpr = make_node (DEBUG_EXPR_DECL);
6393 tree parm = (**debug_args)[i];
6394 DECL_ARTIFICIAL (vexpr) = 1;
6395 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6396 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6397 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6398 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6399 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6400 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6402 while (i > len);
6405 delete param_body_adjs;
6406 free_dominance_info (CDI_DOMINATORS);
6407 free_dominance_info (CDI_POST_DOMINATORS);
6409 gcc_assert (!id.debug_stmts.exists ());
6410 pop_cfun ();
6411 return;
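/* Illustrative call (a sketch): producing a plain copy NEW_DECL of
   OLD_DECL with no parameter changes would look roughly like

     tree_function_versioning (old_decl, new_decl,
                               /*tree_map=*/NULL,
                               /*param_adjustments=*/NULL,
                               /*update_clones=*/false,
                               /*blocks_to_copy=*/NULL,
                               /*new_entry=*/NULL);

   with cgraph nodes for both decls already created by the caller.  */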
6414 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6415 the callee and return the inlined body on success. */
6417 tree
6418 maybe_inline_call_in_expr (tree exp)
6420 tree fn = get_callee_fndecl (exp);
6422 /* We can only try to inline "const" functions. */
6423 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6425 call_expr_arg_iterator iter;
6426 copy_body_data id;
6427 tree param, arg, t;
6428 hash_map<tree, tree> decl_map;
6430 /* Remap the parameters. */
6431 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6432 param;
6433 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6434 decl_map.put (param, arg);
6436 memset (&id, 0, sizeof (id));
6437 id.src_fn = fn;
6438 id.dst_fn = current_function_decl;
6439 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6440 id.decl_map = &decl_map;
6442 id.copy_decl = copy_decl_no_change;
6443 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6444 id.transform_new_cfg = false;
6445 id.transform_return_to_modify = true;
6446 id.transform_parameter = true;
6447 id.transform_lang_insert_block = NULL;
6449 /* Make sure not to unshare trees behind the front-end's back
6450 since front-end specific mechanisms may rely on sharing. */
6451 id.regimplify = false;
6452 id.do_not_unshare = true;
6454 /* We're not inside any EH region. */
6455 id.eh_lp_nr = 0;
6457 t = copy_tree_body (&id);
6459 /* We can only return something suitable for use in a GENERIC
6460 expression tree. */
6461 if (TREE_CODE (t) == MODIFY_EXPR)
6462 return TREE_OPERAND (t, 1);
6465 return NULL_TREE;
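/* Illustrative use (a sketch): a caller folding a call to a "const"
   function with a saved body directly in GENERIC might do

     tree folded = maybe_inline_call_in_expr (call);
     if (folded)
       ... use FOLDED in place of the CALL_EXPR ...

   and keep the original CALL_EXPR whenever NULL_TREE comes back.  */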
6468 /* Duplicate a type, fields and all. */
6470 tree
6471 build_duplicate_type (tree type)
6473 struct copy_body_data id;
6475 memset (&id, 0, sizeof (id));
6476 id.src_fn = current_function_decl;
6477 id.dst_fn = current_function_decl;
6478 id.src_cfun = cfun;
6479 id.decl_map = new hash_map<tree, tree>;
6480 id.debug_map = NULL;
6481 id.copy_decl = copy_decl_no_change;
6483 type = remap_type_1 (type, &id);
6485 delete id.decl_map;
6486 if (id.debug_map)
6487 delete id.debug_map;
6489 TYPE_CANONICAL (type) = type;
6491 return type;
6494 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6495 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6496 evaluation. */
6498 tree
6499 copy_fn (tree fn, tree& parms, tree& result)
6501 copy_body_data id;
6502 tree param;
6503 hash_map<tree, tree> decl_map;
6505 tree *p = &parms;
6506 *p = NULL_TREE;
6508 memset (&id, 0, sizeof (id));
6509 id.src_fn = fn;
6510 id.dst_fn = current_function_decl;
6511 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6512 id.decl_map = &decl_map;
6514 id.copy_decl = copy_decl_no_change;
6515 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6516 id.transform_new_cfg = false;
6517 id.transform_return_to_modify = false;
6518 id.transform_parameter = true;
6519 id.transform_lang_insert_block = NULL;
6521 /* Make sure not to unshare trees behind the front-end's back
6522 since front-end specific mechanisms may rely on sharing. */
6523 id.regimplify = false;
6524 id.do_not_unshare = true;
6525 id.do_not_fold = true;
6527 /* We're not inside any EH region. */
6528 id.eh_lp_nr = 0;
6530 /* Remap the parameters and result and return them to the caller. */
6531 for (param = DECL_ARGUMENTS (fn);
6532 param;
6533 param = DECL_CHAIN (param))
6535 *p = remap_decl (param, &id);
6536 p = &DECL_CHAIN (*p);
6539 if (DECL_RESULT (fn))
6540 result = remap_decl (DECL_RESULT (fn), &id);
6541 else
6542 result = NULL_TREE;
6544 return copy_tree_body (&id);
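/* Illustrative usage (a sketch): the C++ constexpr evaluator duplicates
   a function body before substituting argument values:

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);

   PARMS then chains the remapped PARM_DECLs and RESULT holds the
   remapped RESULT_DECL (or NULL_TREE), ready to be bound to the
   evaluated arguments.  */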