1 /* Tree inlining.
2 Copyright (C) 2001-2021 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "stringpool.h"
60 #include "attribs.h"
61 #include "sreal.h"
62 #include "tree-cfgcleanup.h"
63 #include "tree-ssa-live.h"
64 #include "alloc-pool.h"
65 #include "symbol-summary.h"
66 #include "symtab-thunks.h"
67 #include "symtab-clones.h"
69 /* I'm not real happy about this, but we need to handle gimple and
70 non-gimple trees. */
72 /* Inlining, Cloning, Versioning, Parallelization
74 Inlining: a function body is duplicated, but the PARM_DECLs are
75 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
76 MODIFY_EXPRs that store to a dedicated returned-value variable.
77 The duplicated eh_region info of the copy will later be appended
78 to the info for the caller; the eh_region info in copied throwing
79 statements and RESX statements are adjusted accordingly.
81 Cloning: (only in C++) We have one body for a con/de/structor, and
82 multiple function decls, each with a unique parameter list.
83 Duplicate the body, using the given splay tree; some parameters
84 will become constants (like 0 or 1).
86 Versioning: a function body is duplicated and the result is a new
87 function rather than into blocks of an existing function as with
88 inlining. Some parameters will become constants.
90 Parallelization: a region of a function is duplicated resulting in
91 a new function. Variables may be replaced with complex expressions
92 to enable shared variable semantics.
94 All of these will simultaneously look up any callgraph edges. If
95 we're going to inline the duplicated function body, and the given
96 function has some cloned callgraph nodes (one for each place this
97 function will be inlined), those callgraph edges will be duplicated.
98 If we're cloning the body, those callgraph edges will be
99 updated to point into the new body. (Note that the original
100 callgraph node and edge list will not be altered.)
102 See the CALL_EXPR handling case in copy_tree_body_r (). */
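/* As a rough illustration of the inlining transformation described
   above (a hand-written sketch, not the exact GIMPLE any pass emits):
   inlining the call `y = callee (a);' where

     int callee (int x) { return x + 1; }

   conceptually produces, in the caller,

     x.copy = a;          <-- the PARM_DECL X remapped to a VAR_DECL
     retval = x.copy + 1; <-- the RETURN_EXPR turned into a MODIFY_EXPR
     y = retval;

   while the callee's eh_region info is appended to the caller's and the
   callgraph edges of the duplicated body are updated as described above. */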
104 /* To Do:
106 o In order to make inlining-on-trees work, we pessimized
107 function-local static constants. In particular, they are now
108 always output, even when not addressed. Fix this by treating
109 function-local static constants just like global static
110 constants; the back-end already knows not to output them if they
111 are not needed.
113 o Provide heuristics to clamp inlining of recursive template
114 calls? */
117 /* Weights that estimate_num_insns uses to estimate the size of the
118 produced code. */
120 eni_weights eni_size_weights;
122 /* Weights that estimate_num_insns uses to estimate the time necessary
123 to execute the produced code. */
125 eni_weights eni_time_weights;
127 /* Prototypes. */
129 static tree declare_return_variable (copy_body_data *, tree, tree,
130 basic_block);
131 static void remap_block (tree *, copy_body_data *);
132 static void copy_bind_expr (tree *, int *, copy_body_data *);
133 static void declare_inline_vars (tree, tree);
134 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
135 static void prepend_lexical_block (tree current_block, tree new_block);
136 static tree copy_result_decl_to_var (tree, copy_body_data *);
137 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
138 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
139 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
141 /* Insert a tree->tree mapping for ID. Although the name suggests
142 that the trees should be variables, it is used for more than that. */
144 void
145 insert_decl_map (copy_body_data *id, tree key, tree value)
147 id->decl_map->put (key, value);
149 /* Always insert an identity map as well. If we see this same new
150 node again, we won't want to duplicate it a second time. */
151 if (key != value)
152 id->decl_map->put (value, value);
155 /* If nonzero, we're remapping the contents of inlined debug
156 statements. If negative, an error has occurred, such as a
157 reference to a variable that isn't available in the inlined
158 context. */
159 static int processing_debug_stmt = 0;
161 /* Construct new SSA name for old NAME. ID is the inline context. */
163 static tree
164 remap_ssa_name (tree name, copy_body_data *id)
166 tree new_tree, var;
167 tree *n;
169 gcc_assert (TREE_CODE (name) == SSA_NAME);
171 n = id->decl_map->get (name);
172 if (n)
174 /* When we perform edge redirection as part of CFG copy, IPA-SRA can
175 remove an unused LHS from a call statement. Such an LHS can however
176 still appear in debug statements, but its value is lost in this
177 function and we do not want to map it. */
178 if (id->killed_new_ssa_names
179 && id->killed_new_ssa_names->contains (*n))
181 gcc_assert (processing_debug_stmt);
182 processing_debug_stmt = -1;
183 return name;
186 return unshare_expr (*n);
189 if (processing_debug_stmt)
191 if (SSA_NAME_IS_DEFAULT_DEF (name)
192 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
193 && id->entry_bb == NULL
194 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
196 tree vexpr = make_node (DEBUG_EXPR_DECL);
197 gimple *def_temp;
198 gimple_stmt_iterator gsi;
199 tree val = SSA_NAME_VAR (name);
201 n = id->decl_map->get (val);
202 if (n != NULL)
203 val = *n;
204 if (TREE_CODE (val) != PARM_DECL
205 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
207 processing_debug_stmt = -1;
208 return name;
210 n = id->decl_map->get (val);
211 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
212 return *n;
213 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
214 DECL_ARTIFICIAL (vexpr) = 1;
215 TREE_TYPE (vexpr) = TREE_TYPE (name);
216 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
217 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
218 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
219 insert_decl_map (id, val, vexpr);
220 return vexpr;
223 processing_debug_stmt = -1;
224 return name;
227 /* Remap anonymous SSA names or SSA names of anonymous decls. */
228 var = SSA_NAME_VAR (name);
229 if (!var
230 || (!SSA_NAME_IS_DEFAULT_DEF (name)
231 && VAR_P (var)
232 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
233 && DECL_ARTIFICIAL (var)
234 && DECL_IGNORED_P (var)
235 && !DECL_NAME (var)))
237 struct ptr_info_def *pi;
238 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
239 if (!var && SSA_NAME_IDENTIFIER (name))
240 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
241 insert_decl_map (id, name, new_tree);
242 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
243 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
244 /* At least IPA points-to info can be directly transferred. */
245 if (id->src_cfun->gimple_df
246 && id->src_cfun->gimple_df->ipa_pta
247 && POINTER_TYPE_P (TREE_TYPE (name))
248 && (pi = SSA_NAME_PTR_INFO (name))
249 && !pi->pt.anything)
251 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
252 new_pi->pt = pi->pt;
254 /* So can range-info. */
255 if (!POINTER_TYPE_P (TREE_TYPE (name))
256 && SSA_NAME_RANGE_INFO (name))
257 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
258 SSA_NAME_RANGE_INFO (name));
259 return new_tree;
262 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
263 in copy_bb. */
264 new_tree = remap_decl (var, id);
266 /* We might've substituted a constant or another SSA_NAME for
267 the variable.
269 Replace the SSA name representing the RESULT_DECL with the variable
270 during inlining: this saves us from having to introduce a PHI node
271 in case the return value is only partly initialized. */
272 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
273 && (!SSA_NAME_VAR (name)
274 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
275 || !id->transform_return_to_modify))
277 struct ptr_info_def *pi;
278 new_tree = make_ssa_name (new_tree);
279 insert_decl_map (id, name, new_tree);
280 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
281 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
282 /* At least IPA points-to info can be directly transferred. */
283 if (id->src_cfun->gimple_df
284 && id->src_cfun->gimple_df->ipa_pta
285 && POINTER_TYPE_P (TREE_TYPE (name))
286 && (pi = SSA_NAME_PTR_INFO (name))
287 && !pi->pt.anything)
289 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
290 new_pi->pt = pi->pt;
292 /* So can range-info. */
293 if (!POINTER_TYPE_P (TREE_TYPE (name))
294 && SSA_NAME_RANGE_INFO (name))
295 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
296 SSA_NAME_RANGE_INFO (name));
297 if (SSA_NAME_IS_DEFAULT_DEF (name))
299 /* By inlining a function with an uninitialized variable, we might
300 extend its lifetime (the variable might get reused). This causes
301 an ICE in case we end up extending the lifetime of an SSA name
302 across an abnormal edge, but it also increases register pressure.
304 We simply initialize all uninitialized vars to 0, except when we
305 are inlining into the very first BB. We could avoid this for all
306 BBs that are not inside strongly connected regions of the CFG,
307 but this is expensive to test. */
308 if (id->entry_bb
309 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
310 && (!SSA_NAME_VAR (name)
311 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
312 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
313 0)->dest
314 || EDGE_COUNT (id->entry_bb->preds) != 1))
316 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
317 gimple *init_stmt;
318 tree zero = build_zero_cst (TREE_TYPE (new_tree));
320 init_stmt = gimple_build_assign (new_tree, zero);
321 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
322 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
324 else
326 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
327 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
331 else
332 insert_decl_map (id, name, new_tree);
333 return new_tree;
336 /* Remap DECL during the copying of the BLOCK tree for the function. */
338 tree
339 remap_decl (tree decl, copy_body_data *id)
341 tree *n;
343 /* We only remap local variables in the current function. */
345 /* See if we have remapped this declaration. */
347 n = id->decl_map->get (decl);
349 if (!n && processing_debug_stmt)
351 processing_debug_stmt = -1;
352 return decl;
355 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
356 necessary DECLs have already been remapped and we do not want to duplicate
357 a decl coming from outside of the sequence we are copying. */
358 if (!n
359 && id->prevent_decl_creation_for_types
360 && id->remapping_type_depth > 0
361 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
362 return decl;
364 /* If we didn't already have an equivalent for this declaration, create one
365 now. */
366 if (!n)
368 /* Make a copy of the variable or label. */
369 tree t = id->copy_decl (decl, id);
371 /* Remember it, so that if we encounter this local entity again
372 we can reuse this copy. Do this early because remap_type may
373 need this decl for TYPE_STUB_DECL. */
374 insert_decl_map (id, decl, t);
376 if (!DECL_P (t))
377 return t;
379 /* Remap types, if necessary. */
380 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
381 if (TREE_CODE (t) == TYPE_DECL)
383 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
385 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
386 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
387 is not set on the TYPE_DECL, for example in LTO mode. */
388 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
390 tree x = build_variant_type_copy (TREE_TYPE (t));
391 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
392 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
393 DECL_ORIGINAL_TYPE (t) = x;
397 /* Remap sizes as necessary. */
398 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
399 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
401 /* If fields, do likewise for offset and qualifier. */
402 if (TREE_CODE (t) == FIELD_DECL)
404 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
405 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
406 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
409 return t;
412 if (id->do_not_unshare)
413 return *n;
414 else
415 return unshare_expr (*n);
418 static tree
419 remap_type_1 (tree type, copy_body_data *id)
421 tree new_tree, t;
423 /* We do need a copy. Build and register it now. If this is a pointer or
424 reference type, remap the designated type and make a new pointer or
425 reference type. */
426 if (TREE_CODE (type) == POINTER_TYPE)
428 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
429 TYPE_MODE (type),
430 TYPE_REF_CAN_ALIAS_ALL (type));
431 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
432 new_tree = build_type_attribute_qual_variant (new_tree,
433 TYPE_ATTRIBUTES (type),
434 TYPE_QUALS (type));
435 insert_decl_map (id, type, new_tree);
436 return new_tree;
438 else if (TREE_CODE (type) == REFERENCE_TYPE)
440 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
441 TYPE_MODE (type),
442 TYPE_REF_CAN_ALIAS_ALL (type));
443 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
444 new_tree = build_type_attribute_qual_variant (new_tree,
445 TYPE_ATTRIBUTES (type),
446 TYPE_QUALS (type));
447 insert_decl_map (id, type, new_tree);
448 return new_tree;
450 else
451 new_tree = copy_node (type);
453 insert_decl_map (id, type, new_tree);
455 /* This is a new type, not a copy of an old type. Need to reassociate
456 variants. We can handle everything except the main variant lazily. */
457 t = TYPE_MAIN_VARIANT (type);
458 if (type != t)
460 t = remap_type (t, id);
461 TYPE_MAIN_VARIANT (new_tree) = t;
462 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
463 TYPE_NEXT_VARIANT (t) = new_tree;
465 else
467 TYPE_MAIN_VARIANT (new_tree) = new_tree;
468 TYPE_NEXT_VARIANT (new_tree) = NULL;
471 if (TYPE_STUB_DECL (type))
472 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
474 /* Lazily create pointer and reference types. */
475 TYPE_POINTER_TO (new_tree) = NULL;
476 TYPE_REFERENCE_TO (new_tree) = NULL;
478 /* Copy all types that may contain references to local variables; be sure to
479 preserve sharing between the type and its main variant when possible. */
480 switch (TREE_CODE (new_tree))
482 case INTEGER_TYPE:
483 case REAL_TYPE:
484 case FIXED_POINT_TYPE:
485 case ENUMERAL_TYPE:
486 case BOOLEAN_TYPE:
487 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
489 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
490 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
492 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
493 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
495 else
497 t = TYPE_MIN_VALUE (new_tree);
498 if (t && TREE_CODE (t) != INTEGER_CST)
499 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
501 t = TYPE_MAX_VALUE (new_tree);
502 if (t && TREE_CODE (t) != INTEGER_CST)
503 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
505 return new_tree;
507 case FUNCTION_TYPE:
508 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
509 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
510 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
511 else
512 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
513 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
514 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
515 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
516 else
517 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
518 return new_tree;
520 case ARRAY_TYPE:
521 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
522 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
523 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
524 else
525 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
527 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
529 gcc_checking_assert (TYPE_DOMAIN (type)
530 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
531 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
533 else
535 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
536 /* For array bounds where we have decided not to copy over the bounds
537 variable that isn't used in the OpenMP/OpenACC region, change them
538 to an uninitialized VAR_DECL temporary. */
539 if (id->adjust_array_error_bounds
540 && TYPE_DOMAIN (new_tree)
541 && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
542 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
544 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
545 DECL_ATTRIBUTES (v)
546 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
547 DECL_ATTRIBUTES (v));
548 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
551 break;
553 case RECORD_TYPE:
554 case UNION_TYPE:
555 case QUAL_UNION_TYPE:
556 if (TYPE_MAIN_VARIANT (type) != type
557 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
558 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
559 else
561 tree f, nf = NULL;
563 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
565 t = remap_decl (f, id);
566 DECL_CONTEXT (t) = new_tree;
567 DECL_CHAIN (t) = nf;
568 nf = t;
570 TYPE_FIELDS (new_tree) = nreverse (nf);
572 break;
574 case OFFSET_TYPE:
575 default:
576 /* Shouldn't have been thought variable sized. */
577 gcc_unreachable ();
580 /* All variants of the type share the same size, so use the already remapped data. */
581 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
583 tree s = TYPE_SIZE (type);
584 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
585 tree su = TYPE_SIZE_UNIT (type);
586 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
587 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
588 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
589 || s == mvs);
590 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
591 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
592 || su == mvsu);
593 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
594 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
596 else
598 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
599 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
602 return new_tree;
605 /* Helper function for remap_type_2, called through walk_tree. */
607 static tree
608 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
610 copy_body_data *id = (copy_body_data *) data;
612 if (TYPE_P (*tp))
613 *walk_subtrees = 0;
615 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
616 return *tp;
618 return NULL_TREE;
621 /* Return true if TYPE needs to be remapped because remap_decl on any
622 needed embedded decl returns something other than that decl. */
624 static bool
625 remap_type_2 (tree type, copy_body_data *id)
627 tree t;
629 #define RETURN_TRUE_IF_VAR(T) \
630 do \
632 tree _t = (T); \
633 if (_t) \
635 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
636 return true; \
637 if (!TYPE_SIZES_GIMPLIFIED (type) \
638 && walk_tree (&_t, remap_type_3, id, NULL)) \
639 return true; \
642 while (0)
644 switch (TREE_CODE (type))
646 case POINTER_TYPE:
647 case REFERENCE_TYPE:
648 case FUNCTION_TYPE:
649 case METHOD_TYPE:
650 return remap_type_2 (TREE_TYPE (type), id);
652 case INTEGER_TYPE:
653 case REAL_TYPE:
654 case FIXED_POINT_TYPE:
655 case ENUMERAL_TYPE:
656 case BOOLEAN_TYPE:
657 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
658 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
659 return false;
661 case ARRAY_TYPE:
662 if (remap_type_2 (TREE_TYPE (type), id)
663 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
664 return true;
665 break;
667 case RECORD_TYPE:
668 case UNION_TYPE:
669 case QUAL_UNION_TYPE:
670 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
671 if (TREE_CODE (t) == FIELD_DECL)
673 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
674 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
675 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
676 if (TREE_CODE (type) == QUAL_UNION_TYPE)
677 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
679 break;
681 default:
682 return false;
685 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
686 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
687 return false;
688 #undef RETURN_TRUE_IF_VAR
691 tree
692 remap_type (tree type, copy_body_data *id)
694 tree *node;
695 tree tmp;
697 if (type == NULL)
698 return type;
700 /* See if we have remapped this type. */
701 node = id->decl_map->get (type);
702 if (node)
703 return *node;
705 /* The type only needs remapping if it's variably modified. */
706 if (! variably_modified_type_p (type, id->src_fn)
707 /* Don't remap if copy_decl method doesn't always return a new
708 decl and for all embedded decls returns the passed in decl. */
709 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
711 insert_decl_map (id, type, type);
712 return type;
715 id->remapping_type_depth++;
716 tmp = remap_type_1 (type, id);
717 id->remapping_type_depth--;
719 return tmp;
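/* For intuition about the variably_modified_type_p check above (an
   illustrative example, not taken from any particular testcase): in

     void f (int n) { int a[n]; ... }

   the type of A embeds a reference to the local N in its TYPE_DOMAIN,
   so copying a body that uses A requires building a new ARRAY_TYPE
   whose bound refers to the remapped copy of N.  Ordinary types such
   as `int' contain no such references and are mapped to themselves. */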
722 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
724 static bool
725 can_be_nonlocal (tree decl, copy_body_data *id)
727 /* We cannot duplicate function decls. */
728 if (TREE_CODE (decl) == FUNCTION_DECL)
729 return true;
731 /* Local static vars must be non-local or we get multiple declaration
732 problems. */
733 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
734 return true;
736 return false;
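/* For example (illustrative only): if the inlined callee contains

     static int counter;

   there is exactly one COUNTER no matter how many call sites are
   inlined, so can_be_nonlocal returns true for it; remap_decls below
   then skips remapping it and may instead record it in
   BLOCK_NONLOCALIZED_VARS for debug info. */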
739 static tree
740 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
741 copy_body_data *id)
743 tree old_var;
744 tree new_decls = NULL_TREE;
746 /* Remap its variables. */
747 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
749 tree new_var;
751 if (can_be_nonlocal (old_var, id))
753 /* We need to add this variable to the local decls as otherwise
754 nothing else will do so. */
755 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
756 add_local_decl (cfun, old_var);
757 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
758 && !DECL_IGNORED_P (old_var)
759 && nonlocalized_list)
760 vec_safe_push (*nonlocalized_list, old_var);
761 continue;
764 /* Remap the variable. */
765 new_var = remap_decl (old_var, id);
767 /* If we didn't remap this variable, we can't mess with its
768 TREE_CHAIN. If we remapped this variable to the return slot, it's
769 already declared somewhere else, so don't declare it here. */
771 if (new_var == id->retvar)
773 else if (!new_var)
775 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
776 && !DECL_IGNORED_P (old_var)
777 && nonlocalized_list)
778 vec_safe_push (*nonlocalized_list, old_var);
780 else
782 gcc_assert (DECL_P (new_var));
783 DECL_CHAIN (new_var) = new_decls;
784 new_decls = new_var;
786 /* Also copy value-expressions. */
787 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
789 tree tem = DECL_VALUE_EXPR (new_var);
790 bool old_regimplify = id->regimplify;
791 id->remapping_type_depth++;
792 walk_tree (&tem, copy_tree_body_r, id, NULL);
793 id->remapping_type_depth--;
794 id->regimplify = old_regimplify;
795 SET_DECL_VALUE_EXPR (new_var, tem);
800 return nreverse (new_decls);
803 /* Copy the BLOCK to contain remapped versions of the variables
804 therein. And hook the new block into the block-tree. */
806 static void
807 remap_block (tree *block, copy_body_data *id)
809 tree old_block;
810 tree new_block;
812 /* Make the new block. */
813 old_block = *block;
814 new_block = make_node (BLOCK);
815 TREE_USED (new_block) = TREE_USED (old_block);
816 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
817 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
818 BLOCK_NONLOCALIZED_VARS (new_block)
819 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
820 *block = new_block;
822 /* Remap its variables. */
823 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
824 &BLOCK_NONLOCALIZED_VARS (new_block),
825 id);
827 if (id->transform_lang_insert_block)
828 id->transform_lang_insert_block (new_block);
830 /* Remember the remapped block. */
831 insert_decl_map (id, old_block, new_block);
834 /* Copy the whole block tree and root it in id->block. */
836 static tree
837 remap_blocks (tree block, copy_body_data *id)
839 tree t;
840 tree new_tree = block;
842 if (!block)
843 return NULL;
845 remap_block (&new_tree, id);
846 gcc_assert (new_tree != block);
847 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
848 prepend_lexical_block (new_tree, remap_blocks (t, id));
849 /* Blocks are in arbitrary order, but make things slightly prettier and do
850 not swap order when producing a copy. */
851 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
852 return new_tree;
855 /* Remap the block tree rooted at BLOCK to nothing. */
857 static void
858 remap_blocks_to_null (tree block, copy_body_data *id)
860 tree t;
861 insert_decl_map (id, block, NULL_TREE);
862 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
863 remap_blocks_to_null (t, id);
866 /* Remap the location info pointed to by LOCUS. */
868 static location_t
869 remap_location (location_t locus, copy_body_data *id)
871 if (LOCATION_BLOCK (locus))
873 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
874 gcc_assert (n);
875 if (*n)
876 return set_block (locus, *n);
879 locus = LOCATION_LOCUS (locus);
881 if (locus != UNKNOWN_LOCATION && id->block)
882 return set_block (locus, id->block);
884 return locus;
887 static void
888 copy_statement_list (tree *tp)
890 tree_stmt_iterator oi, ni;
891 tree new_tree;
893 new_tree = alloc_stmt_list ();
894 ni = tsi_start (new_tree);
895 oi = tsi_start (*tp);
896 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
897 *tp = new_tree;
899 for (; !tsi_end_p (oi); tsi_next (&oi))
901 tree stmt = tsi_stmt (oi);
902 if (TREE_CODE (stmt) == STATEMENT_LIST)
903 /* This copy is not redundant; tsi_link_after will smash this
904 STATEMENT_LIST into the end of the one we're building, and we
905 don't want to do that with the original. */
906 copy_statement_list (&stmt);
907 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
911 static void
912 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
914 tree block = BIND_EXPR_BLOCK (*tp);
915 /* Copy (and replace) the statement. */
916 copy_tree_r (tp, walk_subtrees, NULL);
917 if (block)
919 remap_block (&block, id);
920 BIND_EXPR_BLOCK (*tp) = block;
923 if (BIND_EXPR_VARS (*tp))
924 /* This will remap a lot of the same decls again, but this should be
925 harmless. */
926 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
930 /* Create a new gimple_seq by remapping all the statements in BODY
931 using the inlining information in ID. */
933 static gimple_seq
934 remap_gimple_seq (gimple_seq body, copy_body_data *id)
936 gimple_stmt_iterator si;
937 gimple_seq new_body = NULL;
939 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
941 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
942 gimple_seq_add_seq (&new_body, new_stmts);
945 return new_body;
949 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
950 block using the mapping information in ID. */
952 static gimple *
953 copy_gimple_bind (gbind *stmt, copy_body_data *id)
955 gimple *new_bind;
956 tree new_block, new_vars;
957 gimple_seq body, new_body;
959 /* Copy the statement. Note that we purposely don't use copy_stmt
960 here because we need to remap statements as we copy. */
961 body = gimple_bind_body (stmt);
962 new_body = remap_gimple_seq (body, id);
964 new_block = gimple_bind_block (stmt);
965 if (new_block)
966 remap_block (&new_block, id);
968 /* This will remap a lot of the same decls again, but this should be
969 harmless. */
970 new_vars = gimple_bind_vars (stmt);
971 if (new_vars)
972 new_vars = remap_decls (new_vars, NULL, id);
974 new_bind = gimple_build_bind (new_vars, new_body, new_block);
976 return new_bind;
979 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
981 static bool
982 is_parm (tree decl)
984 if (TREE_CODE (decl) == SSA_NAME)
986 decl = SSA_NAME_VAR (decl);
987 if (!decl)
988 return false;
991 return (TREE_CODE (decl) == PARM_DECL);
994 /* Remap the dependence CLIQUE from the source to the destination function
995 as specified in ID. */
997 static unsigned short
998 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1000 if (clique == 0 || processing_debug_stmt)
1001 return 0;
1002 if (!id->dependence_map)
1003 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1004 bool existed;
1005 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1006 if (!existed)
1008 /* Clique 1 is reserved for local ones set by PTA. */
1009 if (cfun->last_clique == 0)
1010 cfun->last_clique = 1;
1011 newc = ++cfun->last_clique;
1013 return newc;
1016 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1017 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1018 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
1019 recursing into the children nodes of *TP. */
1021 static tree
1022 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1024 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1025 copy_body_data *id = (copy_body_data *) wi_p->info;
1026 tree fn = id->src_fn;
1028 /* For recursive invocations this is no longer the LHS itself. */
1029 bool is_lhs = wi_p->is_lhs;
1030 wi_p->is_lhs = false;
1032 if (TREE_CODE (*tp) == SSA_NAME)
1034 *tp = remap_ssa_name (*tp, id);
1035 *walk_subtrees = 0;
1036 if (is_lhs)
1037 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1038 return NULL;
1040 else if (auto_var_in_fn_p (*tp, fn))
1042 /* Local variables and labels need to be replaced by equivalent
1043 variables. We don't want to copy static variables; there's
1044 only one of those, no matter how many times we inline the
1045 containing function. Similarly for globals from an outer
1046 function. */
1047 tree new_decl;
1049 /* Remap the declaration. */
1050 new_decl = remap_decl (*tp, id);
1051 gcc_assert (new_decl);
1052 /* Replace this variable with the copy. */
1053 STRIP_TYPE_NOPS (new_decl);
1054 /* ??? The C++ frontend uses void * pointer zero to initialize
1055 any other type. This confuses the middle-end type verification.
1056 As cloned bodies do not go through gimplification again the fixup
1057 there doesn't trigger. */
1058 if (TREE_CODE (new_decl) == INTEGER_CST
1059 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1060 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1061 *tp = new_decl;
1062 *walk_subtrees = 0;
1064 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1065 gcc_unreachable ();
1066 else if (TREE_CODE (*tp) == SAVE_EXPR)
1067 gcc_unreachable ();
1068 else if (TREE_CODE (*tp) == LABEL_DECL
1069 && (!DECL_CONTEXT (*tp)
1070 || decl_function_context (*tp) == id->src_fn))
1071 /* These may need to be remapped for EH handling. */
1072 *tp = remap_decl (*tp, id);
1073 else if (TREE_CODE (*tp) == FIELD_DECL)
1075 /* If the enclosing record type is variably_modified_type_p, the field
1076 has already been remapped. Otherwise, it need not be. */
1077 tree *n = id->decl_map->get (*tp);
1078 if (n)
1079 *tp = *n;
1080 *walk_subtrees = 0;
1082 else if (TYPE_P (*tp))
1083 /* Types may need remapping as well. */
1084 *tp = remap_type (*tp, id);
1085 else if (CONSTANT_CLASS_P (*tp))
1087 /* If this is a constant, we have to copy the node iff the type
1088 will be remapped. copy_tree_r will not copy a constant. */
1089 tree new_type = remap_type (TREE_TYPE (*tp), id);
1091 if (new_type == TREE_TYPE (*tp))
1092 *walk_subtrees = 0;
1094 else if (TREE_CODE (*tp) == INTEGER_CST)
1095 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1096 else
1098 *tp = copy_node (*tp);
1099 TREE_TYPE (*tp) = new_type;
1102 else
1104 /* Otherwise, just copy the node. Note that copy_tree_r already
1105 knows not to copy VAR_DECLs, etc., so this is safe. */
1107 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1109 /* We need to re-canonicalize MEM_REFs from inline substitutions
1110 that can happen when a pointer argument is an ADDR_EXPR.
1111 Recurse here manually to allow that. */
1112 tree ptr = TREE_OPERAND (*tp, 0);
1113 tree type = remap_type (TREE_TYPE (*tp), id);
1114 tree old = *tp;
1115 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1116 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1117 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1118 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1119 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1120 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1122 MR_DEPENDENCE_CLIQUE (*tp)
1123 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1124 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1126 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1127 remapped a parameter as the property might be valid only
1128 for the parameter itself. */
1129 if (TREE_THIS_NOTRAP (old)
1130 && (!is_parm (TREE_OPERAND (old, 0))
1131 || (!id->transform_parameter && is_parm (ptr))))
1132 TREE_THIS_NOTRAP (*tp) = 1;
1133 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1134 *walk_subtrees = 0;
1135 return NULL;
1138 /* Here is the "usual case". Copy this tree node, and then
1139 tweak some special cases. */
1140 copy_tree_r (tp, walk_subtrees, NULL);
1142 if (TREE_CODE (*tp) != OMP_CLAUSE)
1143 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1145 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1147 /* The copied TARGET_EXPR has never been expanded, even if the
1148 original node was expanded already. */
1149 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1150 TREE_OPERAND (*tp, 3) = NULL_TREE;
1152 else if (TREE_CODE (*tp) == ADDR_EXPR)
1154 /* Variable substitution need not be simple. In particular,
1155 the MEM_REF substitution above. Make sure that
1156 TREE_CONSTANT and friends are up-to-date. */
1157 int invariant = is_gimple_min_invariant (*tp);
1158 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1159 recompute_tree_invariant_for_addr_expr (*tp);
1161 /* If this used to be invariant, but is not any longer,
1162 then regimplification is probably needed. */
1163 if (invariant && !is_gimple_min_invariant (*tp))
1164 id->regimplify = true;
1166 *walk_subtrees = 0;
1170 /* Update the TREE_BLOCK for the cloned expr. */
1171 if (EXPR_P (*tp))
1173 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1174 tree old_block = TREE_BLOCK (*tp);
1175 if (old_block)
1177 tree *n;
1178 n = id->decl_map->get (TREE_BLOCK (*tp));
1179 if (n)
1180 new_block = *n;
1182 TREE_SET_BLOCK (*tp, new_block);
1185 /* Keep iterating. */
1186 return NULL_TREE;
1190 /* Called from copy_body_id via walk_tree. DATA is really a
1191 `copy_body_data *'. */
1193 tree
1194 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1196 copy_body_data *id = (copy_body_data *) data;
1197 tree fn = id->src_fn;
1198 tree new_block;
1200 /* Begin by recognizing trees that we'll completely rewrite for the
1201 inlining context. Our output for these trees is completely
1202 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1203 into an edge). Further down, we'll handle trees that get
1204 duplicated and/or tweaked. */
1206 /* When requested, RETURN_EXPRs should be transformed to just the
1207 contained MODIFY_EXPR. The branch semantics of the return will
1208 be handled elsewhere by manipulating the CFG rather than a statement. */
1209 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1211 tree assignment = TREE_OPERAND (*tp, 0);
1213 /* If we're returning something, just turn that into an
1214 assignment into the equivalent of the original RESULT_DECL.
1215 If the "assignment" is just the result decl, the result
1216 decl has already been set (e.g. a recent "foo (&result_decl,
1217 ...)"); just toss the entire RETURN_EXPR. */
1218 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1220 /* Replace the RETURN_EXPR with (a copy of) the
1221 MODIFY_EXPR hanging underneath. */
1222 *tp = copy_node (assignment);
1224 else /* Else the RETURN_EXPR returns no value. */
1226 *tp = NULL;
1227 return (tree) (void *)1;
1230 else if (TREE_CODE (*tp) == SSA_NAME)
1232 *tp = remap_ssa_name (*tp, id);
1233 *walk_subtrees = 0;
1234 return NULL;
1237 /* Local variables and labels need to be replaced by equivalent
1238 variables. We don't want to copy static variables; there's only
1239 one of those, no matter how many times we inline the containing
1240 function. Similarly for globals from an outer function. */
1241 else if (auto_var_in_fn_p (*tp, fn))
1243 tree new_decl;
1245 /* Remap the declaration. */
1246 new_decl = remap_decl (*tp, id);
1247 gcc_assert (new_decl);
1248 /* Replace this variable with the copy. */
1249 STRIP_TYPE_NOPS (new_decl);
1250 *tp = new_decl;
1251 *walk_subtrees = 0;
1253 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1254 copy_statement_list (tp);
1255 else if (TREE_CODE (*tp) == SAVE_EXPR
1256 || TREE_CODE (*tp) == TARGET_EXPR)
1257 remap_save_expr (tp, id->decl_map, walk_subtrees);
1258 else if (TREE_CODE (*tp) == LABEL_DECL
1259 && (! DECL_CONTEXT (*tp)
1260 || decl_function_context (*tp) == id->src_fn))
1261 /* These may need to be remapped for EH handling. */
1262 *tp = remap_decl (*tp, id);
1263 else if (TREE_CODE (*tp) == BIND_EXPR)
1264 copy_bind_expr (tp, walk_subtrees, id);
1265 /* Types may need remapping as well. */
1266 else if (TYPE_P (*tp))
1267 *tp = remap_type (*tp, id);
1269 /* If this is a constant, we have to copy the node iff the type will be
1270 remapped. copy_tree_r will not copy a constant. */
1271 else if (CONSTANT_CLASS_P (*tp))
1273 tree new_type = remap_type (TREE_TYPE (*tp), id);
1275 if (new_type == TREE_TYPE (*tp))
1276 *walk_subtrees = 0;
1278 else if (TREE_CODE (*tp) == INTEGER_CST)
1279 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1280 else
1282 *tp = copy_node (*tp);
1283 TREE_TYPE (*tp) = new_type;
1287 /* Otherwise, just copy the node. Note that copy_tree_r already
1288 knows not to copy VAR_DECLs, etc., so this is safe. */
1289 else
1291 /* Here we handle trees that are not completely rewritten.
1292 First we detect some inlining-induced bogosities for
1293 discarding. */
1294 if (TREE_CODE (*tp) == MODIFY_EXPR
1295 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1296 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1298 /* Some assignments VAR = VAR; don't generate any rtl code
1299 and thus don't count as variable modification. Avoid
1300 keeping bogosities like 0 = 0. */
1301 tree decl = TREE_OPERAND (*tp, 0), value;
1302 tree *n;
1304 n = id->decl_map->get (decl);
1305 if (n)
1307 value = *n;
1308 STRIP_TYPE_NOPS (value);
1309 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1311 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1312 return copy_tree_body_r (tp, walk_subtrees, data);
1316 else if (TREE_CODE (*tp) == INDIRECT_REF)
1318 /* Get rid of *& from inline substitutions that can happen when a
1319 pointer argument is an ADDR_EXPR. */
1320 tree decl = TREE_OPERAND (*tp, 0);
1321 tree *n = id->decl_map->get (decl);
1322 if (n)
1324 /* If we happen to get an ADDR_EXPR in n->value, strip
1325 it manually here as we'll eventually get ADDR_EXPRs
1326 which lie about their types pointed to. In this case
1327 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1328 but we absolutely rely on that. As fold_indirect_ref
1329 does other useful transformations, try that first, though. */
1330 tree type = TREE_TYPE (*tp);
1331 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1332 tree old = *tp;
1333 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1334 if (! *tp)
1336 type = remap_type (type, id);
1337 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1339 *tp
1340 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1341 /* ??? We should either assert here or build
1342 a VIEW_CONVERT_EXPR instead of blindly leaking
1343 incompatible types to our IL. */
1344 if (! *tp)
1345 *tp = TREE_OPERAND (ptr, 0);
1347 else
1349 *tp = build1 (INDIRECT_REF, type, ptr);
1350 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1351 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1352 TREE_READONLY (*tp) = TREE_READONLY (old);
1353 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1354 have remapped a parameter as the property might be
1355 valid only for the parameter itself. */
1356 if (TREE_THIS_NOTRAP (old)
1357 && (!is_parm (TREE_OPERAND (old, 0))
1358 || (!id->transform_parameter && is_parm (ptr))))
1359 TREE_THIS_NOTRAP (*tp) = 1;
1362 *walk_subtrees = 0;
1363 return NULL;
1366 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1368 /* We need to re-canonicalize MEM_REFs from inline substitutions
1369 that can happen when a pointer argument is an ADDR_EXPR.
1370 Recurse here manually to allow that. */
1371 tree ptr = TREE_OPERAND (*tp, 0);
1372 tree type = remap_type (TREE_TYPE (*tp), id);
1373 tree old = *tp;
1374 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1375 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1376 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1377 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1378 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1379 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1381 MR_DEPENDENCE_CLIQUE (*tp)
1382 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1383 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1385 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1386 remapped a parameter as the property might be valid only
1387 for the parameter itself. */
1388 if (TREE_THIS_NOTRAP (old)
1389 && (!is_parm (TREE_OPERAND (old, 0))
1390 || (!id->transform_parameter && is_parm (ptr))))
1391 TREE_THIS_NOTRAP (*tp) = 1;
1392 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1393 *walk_subtrees = 0;
1394 return NULL;
1397 /* Here is the "usual case". Copy this tree node, and then
1398 tweak some special cases. */
1399 copy_tree_r (tp, walk_subtrees, NULL);
1401 /* If EXPR has a block defined, map it to the newly constructed block.
1402 When inlining we want EXPRs without a block to appear in the block
1403 of the function call if we are not remapping a type. */
1404 if (EXPR_P (*tp))
1406 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1407 if (TREE_BLOCK (*tp))
1409 tree *n;
1410 n = id->decl_map->get (TREE_BLOCK (*tp));
1411 if (n)
1412 new_block = *n;
1414 TREE_SET_BLOCK (*tp, new_block);
1417 if (TREE_CODE (*tp) != OMP_CLAUSE)
1418 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1420 /* The copied TARGET_EXPR has never been expanded, even if the
1421 original node was expanded already. */
1422 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1424 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1425 TREE_OPERAND (*tp, 3) = NULL_TREE;
1428 /* Variable substitution need not be simple. In particular, the
1429 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1430 and friends are up-to-date. */
1431 else if (TREE_CODE (*tp) == ADDR_EXPR)
1433 int invariant = is_gimple_min_invariant (*tp);
1434 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1436 /* Handle the case where we substituted an INDIRECT_REF
1437 into the operand of the ADDR_EXPR. */
1438 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1439 && !id->do_not_fold)
1441 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1442 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1443 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1444 *tp = t;
1446 else
1447 recompute_tree_invariant_for_addr_expr (*tp);
1449 /* If this used to be invariant, but is not any longer,
1450 then regimplification is probably needed. */
1451 if (invariant && !is_gimple_min_invariant (*tp))
1452 id->regimplify = true;
1454 *walk_subtrees = 0;
1456 else if (TREE_CODE (*tp) == OMP_CLAUSE
1457 && (OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_AFFINITY
1458 || OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_DEPEND))
1460 tree t = OMP_CLAUSE_DECL (*tp);
1461 if (TREE_CODE (t) == TREE_LIST
1462 && TREE_PURPOSE (t)
1463 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
1465 *walk_subtrees = 0;
1466 OMP_CLAUSE_DECL (*tp) = copy_node (t);
1467 t = OMP_CLAUSE_DECL (*tp);
1468 TREE_PURPOSE (t) = copy_node (TREE_PURPOSE (t));
1469 for (int i = 0; i <= 4; i++)
1470 walk_tree (&TREE_VEC_ELT (TREE_PURPOSE (t), i),
1471 copy_tree_body_r, id, NULL);
1472 if (TREE_VEC_ELT (TREE_PURPOSE (t), 5))
1473 remap_block (&TREE_VEC_ELT (TREE_PURPOSE (t), 5), id);
1474 walk_tree (&TREE_VALUE (t), copy_tree_body_r, id, NULL);
1479 /* Keep iterating. */
1480 return NULL_TREE;
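/* To illustrate the *& handling above (a simplified sketch): inlining

     void callee (int *p) { *p = 0; }

   at a call `callee (&x)' substitutes &x for the parameter P, which
   initially yields `*&x'; the INDIRECT_REF and MEM_REF cases in
   copy_tree_body_r then fold this back into a direct reference to the
   remapped copy of X, unless id->do_not_fold is set. */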
1483 /* Helper for remap_gimple_stmt. Given an EH region number for the
1484 source function, map that to the duplicate EH region number in
1485 the destination function. */
1487 static int
1488 remap_eh_region_nr (int old_nr, copy_body_data *id)
1490 eh_region old_r, new_r;
1492 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1493 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1495 return new_r->index;
1498 /* Similar, but operate on INTEGER_CSTs. */
1500 static tree
1501 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1503 int old_nr, new_nr;
1505 old_nr = tree_to_shwi (old_t_nr);
1506 new_nr = remap_eh_region_nr (old_nr, id);
1508 return build_int_cst (integer_type_node, new_nr);
1511 /* Helper for copy_bb. Remap statement STMT using the inlining
1512 information in ID. Return the new statement copy. */
1514 static gimple_seq
1515 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1517 gimple *copy = NULL;
1518 struct walk_stmt_info wi;
1519 bool skip_first = false;
1520 gimple_seq stmts = NULL;
1522 if (is_gimple_debug (stmt)
1523 && (gimple_debug_nonbind_marker_p (stmt)
1524 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1525 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1526 return NULL;
1528 /* Begin by recognizing trees that we'll completely rewrite for the
1529 inlining context. Our output for these trees is completely
1530 different from our input (e.g. RETURN_EXPR is deleted and morphs
1531 into an edge). Further down, we'll handle trees that get
1532 duplicated and/or tweaked. */
1534 /* When requested, GIMPLE_RETURN should be transformed to just the
1535 contained GIMPLE_ASSIGN. The branch semantics of the return will
1536 be handled elsewhere by manipulating the CFG rather than the
1537 statement. */
1538 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1540 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1542 /* If we're returning something, just turn that into an
1543 assignment to the equivalent of the original RESULT_DECL.
1544 If RETVAL is just the result decl, the result decl has
1545 already been set (e.g. a recent "foo (&result_decl, ...)");
1546 just toss the entire GIMPLE_RETURN. Likewise for when the
1547 call doesn't want the return value. */
1548 if (retval
1549 && (TREE_CODE (retval) != RESULT_DECL
1550 && (!id->call_stmt
1551 || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1552 && (TREE_CODE (retval) != SSA_NAME
1553 || ! SSA_NAME_VAR (retval)
1554 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1556 copy = gimple_build_assign (id->do_not_unshare
1557 ? id->retvar : unshare_expr (id->retvar),
1558 retval);
1559 /* id->retvar is already substituted. Skip it on later remapping. */
1560 skip_first = true;
1562 else
1563 return NULL;
1565 else if (gimple_has_substatements (stmt))
1567 gimple_seq s1, s2;
1569 /* When cloning bodies from the C++ front end, we will be handed bodies
1570 in High GIMPLE form. Handle here all the High GIMPLE statements that
1571 have embedded statements. */
1572 switch (gimple_code (stmt))
1574 case GIMPLE_BIND:
1575 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1576 break;
1578 case GIMPLE_CATCH:
1580 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1581 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1582 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1584 break;
1586 case GIMPLE_EH_FILTER:
1587 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1588 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1589 break;
1591 case GIMPLE_TRY:
1592 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1593 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1594 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1595 break;
1597 case GIMPLE_WITH_CLEANUP_EXPR:
1598 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1599 copy = gimple_build_wce (s1);
1600 break;
1602 case GIMPLE_OMP_PARALLEL:
1604 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1605 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1606 copy = gimple_build_omp_parallel
1607 (s1,
1608 gimple_omp_parallel_clauses (omp_par_stmt),
1609 gimple_omp_parallel_child_fn (omp_par_stmt),
1610 gimple_omp_parallel_data_arg (omp_par_stmt));
1612 break;
1614 case GIMPLE_OMP_TASK:
1615 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1616 copy = gimple_build_omp_task
1617 (s1,
1618 gimple_omp_task_clauses (stmt),
1619 gimple_omp_task_child_fn (stmt),
1620 gimple_omp_task_data_arg (stmt),
1621 gimple_omp_task_copy_fn (stmt),
1622 gimple_omp_task_arg_size (stmt),
1623 gimple_omp_task_arg_align (stmt));
1624 break;
1626 case GIMPLE_OMP_FOR:
1627 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1628 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1629 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1630 gimple_omp_for_clauses (stmt),
1631 gimple_omp_for_collapse (stmt), s2);
1633 size_t i;
1634 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1636 gimple_omp_for_set_index (copy, i,
1637 gimple_omp_for_index (stmt, i));
1638 gimple_omp_for_set_initial (copy, i,
1639 gimple_omp_for_initial (stmt, i));
1640 gimple_omp_for_set_final (copy, i,
1641 gimple_omp_for_final (stmt, i));
1642 gimple_omp_for_set_incr (copy, i,
1643 gimple_omp_for_incr (stmt, i));
1644 gimple_omp_for_set_cond (copy, i,
1645 gimple_omp_for_cond (stmt, i));
1648 break;
1650 case GIMPLE_OMP_MASTER:
1651 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1652 copy = gimple_build_omp_master (s1);
1653 break;
1655 case GIMPLE_OMP_TASKGROUP:
1656 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1657 copy = gimple_build_omp_taskgroup
1658 (s1, gimple_omp_taskgroup_clauses (stmt));
1659 break;
1661 case GIMPLE_OMP_ORDERED:
1662 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1663 copy = gimple_build_omp_ordered
1664 (s1,
1665 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1666 break;
1668 case GIMPLE_OMP_SCAN:
1669 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1670 copy = gimple_build_omp_scan
1671 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1672 break;
1674 case GIMPLE_OMP_SECTION:
1675 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1676 copy = gimple_build_omp_section (s1);
1677 break;
1679 case GIMPLE_OMP_SECTIONS:
1680 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1681 copy = gimple_build_omp_sections
1682 (s1, gimple_omp_sections_clauses (stmt));
1683 break;
1685 case GIMPLE_OMP_SINGLE:
1686 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1687 copy = gimple_build_omp_single
1688 (s1, gimple_omp_single_clauses (stmt));
1689 break;
1691 case GIMPLE_OMP_TARGET:
1692 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1693 copy = gimple_build_omp_target
1694 (s1, gimple_omp_target_kind (stmt),
1695 gimple_omp_target_clauses (stmt));
1696 break;
1698 case GIMPLE_OMP_TEAMS:
1699 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1700 copy = gimple_build_omp_teams
1701 (s1, gimple_omp_teams_clauses (stmt));
1702 break;
1704 case GIMPLE_OMP_CRITICAL:
1705 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1706 copy = gimple_build_omp_critical (s1,
1707 gimple_omp_critical_name
1708 (as_a <gomp_critical *> (stmt)),
1709 gimple_omp_critical_clauses
1710 (as_a <gomp_critical *> (stmt)));
1711 break;
1713 case GIMPLE_TRANSACTION:
1715 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1716 gtransaction *new_trans_stmt;
1717 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1718 id);
1719 copy = new_trans_stmt = gimple_build_transaction (s1);
1720 gimple_transaction_set_subcode (new_trans_stmt,
1721 gimple_transaction_subcode (old_trans_stmt));
1722 gimple_transaction_set_label_norm (new_trans_stmt,
1723 gimple_transaction_label_norm (old_trans_stmt));
1724 gimple_transaction_set_label_uninst (new_trans_stmt,
1725 gimple_transaction_label_uninst (old_trans_stmt));
1726 gimple_transaction_set_label_over (new_trans_stmt,
1727 gimple_transaction_label_over (old_trans_stmt));
1729 break;
1731 default:
1732 gcc_unreachable ();
1735 else
1737 if (gimple_assign_copy_p (stmt)
1738 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1739 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1741 /* Here we handle statements that are not completely rewritten.
1742 First we detect some inlining-induced bogosities for
1743 discarding. */
1745 /* Some assignments VAR = VAR; don't generate any rtl code
1746 and thus don't count as variable modification. Avoid
1747 keeping bogosities like 0 = 0. */
1748 tree decl = gimple_assign_lhs (stmt), value;
1749 tree *n;
1751 n = id->decl_map->get (decl);
1752 if (n)
1754 value = *n;
1755 STRIP_TYPE_NOPS (value);
1756 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1757 return NULL;
1761 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1762 in a block that we aren't copying during tree_function_versioning,
1763 just drop the clobber stmt. */
1764 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1766 tree lhs = gimple_assign_lhs (stmt);
1767 if (TREE_CODE (lhs) == MEM_REF
1768 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1770 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1771 if (gimple_bb (def_stmt)
1772 && !bitmap_bit_p (id->blocks_to_copy,
1773 gimple_bb (def_stmt)->index))
1774 return NULL;
1778 /* We do not allow CLOBBERs of handled components. In case the
1779 returned value is stored via such a handled component, remove
1780 the clobber so the stmt verifier is happy. */
1781 if (gimple_clobber_p (stmt)
1782 && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1784 tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1785 if (!DECL_P (remapped)
1786 && TREE_CODE (remapped) != MEM_REF)
1787 return NULL;
1790 if (gimple_debug_bind_p (stmt))
1792 gdebug *copy
1793 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1794 gimple_debug_bind_get_value (stmt),
1795 stmt);
1796 if (id->reset_location)
1797 gimple_set_location (copy, input_location);
1798 id->debug_stmts.safe_push (copy);
1799 gimple_seq_add_stmt (&stmts, copy);
1800 return stmts;
1802 if (gimple_debug_source_bind_p (stmt))
1804 gdebug *copy = gimple_build_debug_source_bind
1805 (gimple_debug_source_bind_get_var (stmt),
1806 gimple_debug_source_bind_get_value (stmt),
1807 stmt);
1808 if (id->reset_location)
1809 gimple_set_location (copy, input_location);
1810 id->debug_stmts.safe_push (copy);
1811 gimple_seq_add_stmt (&stmts, copy);
1812 return stmts;
1814 if (gimple_debug_nonbind_marker_p (stmt))
1816 /* If the inlined function has too many debug markers,
1817 don't copy them. */
1818 if (id->src_cfun->debug_marker_count
1819 > param_max_debug_marker_count
1820 || id->reset_location)
1821 return stmts;
1823 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1824 id->debug_stmts.safe_push (copy);
1825 gimple_seq_add_stmt (&stmts, copy);
1826 return stmts;
1829 /* Create a new deep copy of the statement. */
1830 copy = gimple_copy (stmt);
1832 /* Clear flags that need revisiting. */
1833 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1835 if (gimple_call_tail_p (call_stmt))
1836 gimple_call_set_tail (call_stmt, false);
1837 if (gimple_call_from_thunk_p (call_stmt))
1838 gimple_call_set_from_thunk (call_stmt, false);
1839 if (gimple_call_internal_p (call_stmt))
1840 switch (gimple_call_internal_fn (call_stmt))
1842 case IFN_GOMP_SIMD_LANE:
1843 case IFN_GOMP_SIMD_VF:
1844 case IFN_GOMP_SIMD_LAST_LANE:
1845 case IFN_GOMP_SIMD_ORDERED_START:
1846 case IFN_GOMP_SIMD_ORDERED_END:
1847 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1848 break;
1849 default:
1850 break;
1854 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1855 RESX and EH_DISPATCH. */
1856 if (id->eh_map)
1857 switch (gimple_code (copy))
1859 case GIMPLE_CALL:
1861 tree r, fndecl = gimple_call_fndecl (copy);
1862 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1863 switch (DECL_FUNCTION_CODE (fndecl))
1865 case BUILT_IN_EH_COPY_VALUES:
1866 r = gimple_call_arg (copy, 1);
1867 r = remap_eh_region_tree_nr (r, id);
1868 gimple_call_set_arg (copy, 1, r);
1869 /* FALLTHRU */
1871 case BUILT_IN_EH_POINTER:
1872 case BUILT_IN_EH_FILTER:
1873 r = gimple_call_arg (copy, 0);
1874 r = remap_eh_region_tree_nr (r, id);
1875 gimple_call_set_arg (copy, 0, r);
1876 break;
1878 default:
1879 break;
1882 /* Reset alias info if we didn't apply measures to
1883 keep it valid over inlining by setting DECL_PT_UID. */
1884 if (!id->src_cfun->gimple_df
1885 || !id->src_cfun->gimple_df->ipa_pta)
1886 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1888 break;
1890 case GIMPLE_RESX:
1892 gresx *resx_stmt = as_a <gresx *> (copy);
1893 int r = gimple_resx_region (resx_stmt);
1894 r = remap_eh_region_nr (r, id);
1895 gimple_resx_set_region (resx_stmt, r);
1897 break;
1899 case GIMPLE_EH_DISPATCH:
1901 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1902 int r = gimple_eh_dispatch_region (eh_dispatch);
1903 r = remap_eh_region_nr (r, id);
1904 gimple_eh_dispatch_set_region (eh_dispatch, r);
1906 break;
1908 default:
1909 break;
1913 /* If STMT has a block defined, map it to the newly constructed block. */
1914 if (tree block = gimple_block (copy))
1916 tree *n;
1917 n = id->decl_map->get (block);
1918 gcc_assert (n);
1919 gimple_set_block (copy, *n);
1921 if (id->param_body_adjs)
1923 gimple_seq extra_stmts = NULL;
1924 id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts);
1925 if (!gimple_seq_empty_p (extra_stmts))
1927 memset (&wi, 0, sizeof (wi));
1928 wi.info = id;
1929 for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1930 !gsi_end_p (egsi);
1931 gsi_next (&egsi))
1932 walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1933 gimple_seq_add_seq (&stmts, extra_stmts);
1937 if (id->reset_location)
1938 gimple_set_location (copy, input_location);
1940 /* Debug statements ought to be rebuilt and not copied. */
1941 gcc_checking_assert (!is_gimple_debug (copy));
1943 /* Remap all the operands in COPY. */
1944 memset (&wi, 0, sizeof (wi));
1945 wi.info = id;
1946 if (skip_first)
1947 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1948 else
1949 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1951 /* Clear the copied virtual operands. We are not remapping them here
1952 but are going to recreate them from scratch. */
1953 if (gimple_has_mem_ops (copy))
1955 gimple_set_vdef (copy, NULL_TREE);
1956 gimple_set_vuse (copy, NULL_TREE);
1959 if (cfun->can_throw_non_call_exceptions)
1961 /* When inlining a function which does not have non-call exceptions
1962 enabled into a function that has (which only happens with
1963 always-inline) we have to fix up stmts that cannot throw. */
1964 if (gcond *cond = dyn_cast <gcond *> (copy))
1965 if (gimple_could_trap_p (cond))
1967 gassign *cmp
1968 = gimple_build_assign (make_ssa_name (boolean_type_node),
1969 gimple_cond_code (cond),
1970 gimple_cond_lhs (cond),
1971 gimple_cond_rhs (cond));
1972 gimple_seq_add_stmt (&stmts, cmp);
1973 gimple_cond_set_code (cond, NE_EXPR);
1974 gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
1975 gimple_cond_set_rhs (cond, boolean_false_node);
1979 gimple_seq_add_stmt (&stmts, copy);
1980 return stmts;
1984 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1985 later */
1987 static basic_block
1988 copy_bb (copy_body_data *id, basic_block bb,
1989 profile_count num, profile_count den)
1991 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1992 basic_block copy_basic_block;
1993 tree decl;
1994 basic_block prev;
1996 profile_count::adjust_for_ipa_scaling (&num, &den);
1998 /* Search for previous copied basic block. */
1999 prev = bb->prev_bb;
2000 while (!prev->aux)
2001 prev = prev->prev_bb;
2003 /* create_basic_block() will append every new block to
2004 basic_block_info automatically. */
2005 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
2006 copy_basic_block->count = bb->count.apply_scale (num, den);
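/* A worked example with illustrative numbers: if the callee's entry
   count DEN is 1000 and the count flowing into this particular copy
   NUM is 250, apply_scale multiplies each copied block's count by
   250/1000, so a callee block executed 400 times in total gets a
   count of 100 in this copy. */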
2008 copy_gsi = gsi_start_bb (copy_basic_block);
2010 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2012 gimple_seq stmts;
2013 gimple *stmt = gsi_stmt (gsi);
2014 gimple *orig_stmt = stmt;
2015 gimple_stmt_iterator stmts_gsi;
2016 bool stmt_added = false;
2018 id->regimplify = false;
2019 stmts = remap_gimple_stmt (stmt, id);
2021 if (gimple_seq_empty_p (stmts))
2022 continue;
2024 seq_gsi = copy_gsi;
2026 for (stmts_gsi = gsi_start (stmts);
2027 !gsi_end_p (stmts_gsi); )
2029 stmt = gsi_stmt (stmts_gsi);
2031 /* Advance iterator now before stmt is moved to seq_gsi. */
2032 gsi_next (&stmts_gsi);
2034 if (gimple_nop_p (stmt))
2035 continue;
2037 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2038 orig_stmt);
2040 /* With return slot optimization we can end up with
2041 non-gimple (foo *)&this->m, fix that here. */
2042 if (is_gimple_assign (stmt)
2043 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
2044 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2046 tree new_rhs;
2047 new_rhs = force_gimple_operand_gsi (&seq_gsi,
2048 gimple_assign_rhs1 (stmt),
2049 true, NULL, false,
2050 GSI_CONTINUE_LINKING);
2051 gimple_assign_set_rhs1 (stmt, new_rhs);
2052 id->regimplify = false;
2055 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2057 if (id->regimplify)
2058 gimple_regimplify_operands (stmt, &seq_gsi);
2060 stmt_added = true;
2063 if (!stmt_added)
2064 continue;
2066 /* If copy_basic_block has been empty at the start of this iteration,
2067 call gsi_start_bb again to get at the newly added statements. */
2068 if (gsi_end_p (copy_gsi))
2069 copy_gsi = gsi_start_bb (copy_basic_block);
2070 else
2071 gsi_next (&copy_gsi);
2073 /* Process the new statement. The call to gimple_regimplify_operands
2074 possibly turned the statement into multiple statements; we
2075 need to process all of them. */
2078 tree fn;
2079 gcall *call_stmt;
2081 stmt = gsi_stmt (copy_gsi);
2082 call_stmt = dyn_cast <gcall *> (stmt);
2083 if (call_stmt
2084 && gimple_call_va_arg_pack_p (call_stmt)
2085 && id->call_stmt
2086 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2088 /* __builtin_va_arg_pack () should be replaced by
2089 all arguments corresponding to ... in the caller. */
2090 tree p;
2091 gcall *new_call;
2092 vec<tree> argarray;
2093 size_t nargs_caller = gimple_call_num_args (id->call_stmt);
2094 size_t nargs = nargs_caller;
2096 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2097 nargs--;
2099 /* Create the new array of arguments. */
2100 size_t nargs_callee = gimple_call_num_args (call_stmt);
2101 size_t n = nargs + nargs_callee;
2102 argarray.create (n);
2103 argarray.safe_grow_cleared (n, true);
2105 /* Copy all the arguments before '...' */
2106 if (nargs_callee)
2107 memcpy (argarray.address (),
2108 gimple_call_arg_ptr (call_stmt, 0),
2109 nargs_callee * sizeof (tree));
2111 /* Append the arguments passed in '...' */
2112 if (nargs)
2113 memcpy (argarray.address () + nargs_callee,
2114 gimple_call_arg_ptr (id->call_stmt, 0)
2115 + (nargs_caller - nargs), nargs * sizeof (tree));
2117 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2118 argarray);
2120 argarray.release ();
2122 /* Copy all GIMPLE_CALL flags, location and block, except
2123 GF_CALL_VA_ARG_PACK. */
2124 gimple_call_copy_flags (new_call, call_stmt);
2125 gimple_call_set_va_arg_pack (new_call, false);
2126 gimple_call_set_fntype (new_call, gimple_call_fntype (call_stmt));
2127 /* location includes block. */
2128 gimple_set_location (new_call, gimple_location (stmt));
2129 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2131 gsi_replace (&copy_gsi, new_call, false);
2132 stmt = new_call;
2134 else if (call_stmt
2135 && id->call_stmt
2136 && (decl = gimple_call_fndecl (stmt))
2137 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2139 /* __builtin_va_arg_pack_len () should be replaced by
2140 the number of anonymous arguments. */
2141 size_t nargs = gimple_call_num_args (id->call_stmt);
2142 tree count, p;
2143 gimple *new_stmt;
2145 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2146 nargs--;
2148 if (!gimple_call_lhs (stmt))
2150 /* Drop unused calls. */
2151 gsi_remove (&copy_gsi, false);
2152 continue;
2154 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2156 count = build_int_cst (integer_type_node, nargs);
2157 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2158 gsi_replace (&copy_gsi, new_stmt, false);
2159 stmt = new_stmt;
2161 else if (nargs != 0)
2163 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2164 count = build_int_cst (integer_type_node, nargs);
2165 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2166 PLUS_EXPR, newlhs, count);
2167 gimple_call_set_lhs (stmt, newlhs);
2168 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2171 else if (call_stmt
2172 && id->call_stmt
2173 && gimple_call_internal_p (stmt)
2174 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2176 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2177 gsi_remove (&copy_gsi, false);
2178 continue;
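/* For reference, a minimal user-level sketch of what the va_arg_pack
   branches above handle; the wrapper below is illustrative and not
   part of GCC itself:

     #include <stdio.h>

     extern inline __attribute__ ((__always_inline__, __gnu_inline__))
     int log_msg (int level, const char *fmt, ...)
     {
       if (__builtin_va_arg_pack_len () == 0)
         return fputs (fmt, stderr);
       return fprintf (stderr, fmt, __builtin_va_arg_pack ());
     }

   When log_msg (1, "%s:%d\n", file, line) is inlined, the call to
   __builtin_va_arg_pack () is replaced by the caller's trailing
   arguments file and line, and __builtin_va_arg_pack_len () folds
   to the constant 2. */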
2181 /* Statements produced by inlining can be unfolded, especially
2182 when we constant propagated some operands. We can't fold
2183 them right now for two reasons:
2184 1) folding require SSA_NAME_DEF_STMTs to be correct
2185 2) we can't change function calls to builtins.
2186 So we just mark the statement for later folding. We mark
2187 all new statements, instead of just those that have changed
2188 by some nontrivial substitution, so even statements made
2189 foldable indirectly are updated. If this turns out to be
2190 expensive, copy_body can be told to watch for nontrivial
2191 changes. */
2192 if (id->statements_to_fold)
2193 id->statements_to_fold->add (stmt);
2195 /* We're duplicating a CALL_EXPR. Find any corresponding
2196 callgraph edges and update or duplicate them. */
2197 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2199 struct cgraph_edge *edge;
2201 switch (id->transform_call_graph_edges)
2203 case CB_CGE_DUPLICATE:
2204 edge = id->src_node->get_edge (orig_stmt);
2205 if (edge)
2207 struct cgraph_edge *old_edge = edge;
2209 /* A speculative call consists of multiple
2210 edges - an indirect edge and one or more direct edges.
2211 Duplicate the whole thing and distribute frequencies
2212 accordingly. */
2213 if (edge->speculative)
2215 int n = 0;
2216 profile_count direct_cnt
2217 = profile_count::zero ();
2219 /* First figure out the distribution of counts
2220 so we can re-scale BB profile accordingly. */
2221 for (cgraph_edge *e = old_edge; e;
2222 e = e->next_speculative_call_target ())
2223 direct_cnt = direct_cnt + e->count;
2225 cgraph_edge *indirect
2226 = old_edge->speculative_call_indirect_edge ();
2227 profile_count indir_cnt = indirect->count;
2229 /* Next iterate over all direct edges, clone each one and its
2230 corresponding reference, and update the profile. */
2231 for (cgraph_edge *e = old_edge;
2233 e = e->next_speculative_call_target ())
2235 profile_count cnt = e->count;
2237 id->dst_node->clone_reference
2238 (e->speculative_call_target_ref (), stmt);
2239 edge = e->clone (id->dst_node, call_stmt,
2240 gimple_uid (stmt), num, den,
2241 true);
2242 profile_probability prob
2243 = cnt.probability_in (direct_cnt
2244 + indir_cnt);
2245 edge->count
2246 = copy_basic_block->count.apply_probability
2247 (prob);
2248 n++;
2250 gcc_checking_assert
2251 (indirect->num_speculative_call_targets_p ()
2252 == n);
2254 /* Duplicate the indirect edge after all direct edges
2255 have been cloned. */
2256 indirect = indirect->clone (id->dst_node, call_stmt,
2257 gimple_uid (stmt),
2258 num, den,
2259 true);
2261 profile_probability prob
2262 = indir_cnt.probability_in (direct_cnt
2263 + indir_cnt);
2264 indirect->count
2265 = copy_basic_block->count.apply_probability (prob);
2267 else
2269 edge = edge->clone (id->dst_node, call_stmt,
2270 gimple_uid (stmt),
2271 num, den,
2272 true);
2273 edge->count = copy_basic_block->count;
2276 break;
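/* For instance, with illustrative numbers: if the speculative call had
   one direct edge with count 90 and an indirect edge with count 10, and
   the copied block's count is 50, the cloned direct edge gets
   50 * 90/100 = 45 and the cloned indirect edge 50 * 10/100 = 5, so
   the original 9:1 split is preserved at the new scale. */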
2278 case CB_CGE_MOVE_CLONES:
2279 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2280 call_stmt);
2281 edge = id->dst_node->get_edge (stmt);
2282 break;
2284 case CB_CGE_MOVE:
2285 edge = id->dst_node->get_edge (orig_stmt);
2286 if (edge)
2287 edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2288 break;
2290 default:
2291 gcc_unreachable ();
2294 /* Constant propagation on arguments done during inlining
2295 may create a new direct call. Produce an edge for it. */
2296 if ((!edge
2297 || (edge->indirect_inlining_edge
2298 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2299 && id->dst_node->definition
2300 && (fn = gimple_call_fndecl (stmt)) != NULL)
2302 struct cgraph_node *dest = cgraph_node::get_create (fn);
2304 /* We have a missing edge in the callgraph. This can happen
2305 when previous inlining turned an indirect call into a
2306 direct call by constant propagating arguments, or we are
2307 producing a dead clone (for further cloning). In all
2308 other cases we hit a bug (incorrect node sharing is the
2309 most common reason for missing edges). */
2310 gcc_assert (!dest->definition
2311 || dest->address_taken
2312 || !id->src_node->definition
2313 || !id->dst_node->definition);
2314 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2315 id->dst_node->create_edge_including_clones
2316 (dest, orig_stmt, call_stmt, bb->count,
2317 CIF_ORIGINALLY_INDIRECT_CALL);
2318 else
2319 id->dst_node->create_edge (dest, call_stmt,
2320 bb->count)->inline_failed
2321 = CIF_ORIGINALLY_INDIRECT_CALL;
2322 if (dump_file)
2324 fprintf (dump_file, "Created new direct edge to %s\n",
2325 dest->dump_name ());
2329 notice_special_calls (as_a <gcall *> (stmt));
2332 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2333 id->eh_map, id->eh_lp_nr);
2335 gsi_next (&copy_gsi);
2337 while (!gsi_end_p (copy_gsi));
2339 copy_gsi = gsi_last_bb (copy_basic_block);
2342 return copy_basic_block;
2345 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2346 form is quite easy, since dominator relationship for old basic blocks does
2347 not change.
2349 There is however an exception: inlining might change the dominator relation
2350 across EH edges from basic blocks within the inlined function that lead
2351 to landing pads in the function we inline into.
2353 The function fills in PHI_RESULTs of such PHI nodes if they refer
2354 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2355 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2356 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2357 set, and this means that there will be no overlapping live ranges
2358 for the underlying symbol.
2360 This might change in the future if we allow redirecting of EH edges, and
2361 we might then want to change the way we build the CFG pre-inlining to include
2362 all the possible edges. */
2363 static void
2364 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2365 bool can_throw, bool nonlocal_goto)
2367 edge e;
2368 edge_iterator ei;
2370 FOR_EACH_EDGE (e, ei, bb->succs)
2371 if (!e->dest->aux
2372 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2374 gphi *phi;
2375 gphi_iterator si;
2377 if (!nonlocal_goto)
2378 gcc_assert (e->flags & EDGE_EH);
2380 if (!can_throw)
2381 gcc_assert (!(e->flags & EDGE_EH));
2383 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2385 edge re;
2387 phi = si.phi ();
2389 /* For abnormal goto/call edges the receiver can be the
2390 ENTRY_BLOCK. Do not assert this cannot happen. */
2392 gcc_assert ((e->flags & EDGE_EH)
2393 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2395 re = find_edge (ret_bb, e->dest);
2396 gcc_checking_assert (re);
2397 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2398 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2400 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2401 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2406 /* Insert clobbers for automatic variables of inlined ID->src_fn
2407 function at the start of basic block ID->eh_landing_pad_dest. */
2409 static void
2410 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2412 tree var;
2413 basic_block bb = id->eh_landing_pad_dest;
2414 live_vars_map *vars = NULL;
2415 unsigned int cnt = 0;
2416 unsigned int i;
2417 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2418 if (VAR_P (var)
2419 && !DECL_HARD_REGISTER (var)
2420 && !TREE_THIS_VOLATILE (var)
2421 && !DECL_HAS_VALUE_EXPR_P (var)
2422 && !is_gimple_reg (var)
2423 && auto_var_in_fn_p (var, id->src_fn)
2424 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2426 tree *t = id->decl_map->get (var);
2427 if (!t)
2428 continue;
2429 tree new_var = *t;
2430 if (VAR_P (new_var)
2431 && !DECL_HARD_REGISTER (new_var)
2432 && !TREE_THIS_VOLATILE (new_var)
2433 && !DECL_HAS_VALUE_EXPR_P (new_var)
2434 && !is_gimple_reg (new_var)
2435 && auto_var_in_fn_p (new_var, id->dst_fn))
2437 if (vars == NULL)
2438 vars = new live_vars_map;
2439 vars->put (DECL_UID (var), cnt++);
2442 if (vars == NULL)
2443 return;
2445 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2446 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2447 if (VAR_P (var))
2449 edge e;
2450 edge_iterator ei;
2451 bool needed = false;
2452 unsigned int *v = vars->get (DECL_UID (var));
2453 if (v == NULL)
2454 continue;
2455 FOR_EACH_EDGE (e, ei, bb->preds)
2456 if ((e->flags & EDGE_EH) != 0
2457 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2459 basic_block src_bb = (basic_block) e->src->aux;
2461 if (bitmap_bit_p (&live[src_bb->index], *v))
2463 needed = true;
2464 break;
2467 if (needed)
2469 tree new_var = *id->decl_map->get (var);
2470 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2471 tree clobber = build_clobber (TREE_TYPE (new_var));
2472 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2473 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2476 destroy_live_vars (live);
2477 delete vars;
2480 /* Copy edges from BB into its copy constructed earlier, scale profile
2481 accordingly. Edges will be taken care of later. Assume aux
2482 pointers point to the copies of each BB. Return true if any
2483 debug stmts are left after a statement that must end the basic block. */
2485 static bool
2486 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2487 basic_block ret_bb, basic_block abnormal_goto_dest,
2488 copy_body_data *id)
2490 basic_block new_bb = (basic_block) bb->aux;
2491 edge_iterator ei;
2492 edge old_edge;
2493 gimple_stmt_iterator si;
2494 bool need_debug_cleanup = false;
2496 /* Use the indices from the original blocks to create edges for the
2497 new ones. */
2498 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2499 if (!(old_edge->flags & EDGE_EH))
2501 edge new_edge;
2502 int flags = old_edge->flags;
2503 location_t locus = old_edge->goto_locus;
2505 /* Return edges do get a FALLTHRU flag when they get inlined. */
2506 if (old_edge->dest->index == EXIT_BLOCK
2507 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2508 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2509 flags |= EDGE_FALLTHRU;
2511 new_edge
2512 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2513 new_edge->probability = old_edge->probability;
2514 if (!id->reset_location)
2515 new_edge->goto_locus = remap_location (locus, id);
2518 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2519 return false;
2521 /* When doing function splitting, we must decrease the count of the return block
2522 which was previously reachable from a block we did not copy. */
2523 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2524 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2525 if (old_edge->src->index != ENTRY_BLOCK
2526 && !old_edge->src->aux)
2527 new_bb->count -= old_edge->count ().apply_scale (num, den);
2529 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2531 gimple *copy_stmt;
2532 bool can_throw, nonlocal_goto;
2534 copy_stmt = gsi_stmt (si);
2535 if (!is_gimple_debug (copy_stmt))
2536 update_stmt (copy_stmt);
2538 /* Do this before the possible split_block. */
2539 gsi_next (&si);
2541 /* If this tree could throw an exception, there are two
2542 cases where we need to add abnormal edge(s): the
2543 tree wasn't in a region and there is a "current
2544 region" in the caller; or the original tree had
2545 EH edges. In both cases split the block after the tree,
2546 and add abnormal edge(s) as needed; we need both
2547 those from the callee and the caller.
2548 We check whether the copy can throw, because the const
2549 propagation can change an INDIRECT_REF which throws
2550 into a COMPONENT_REF which doesn't. If the copy
2551 can throw, the original could also throw. */
2552 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2553 nonlocal_goto
2554 = (stmt_can_make_abnormal_goto (copy_stmt)
2555 && !computed_goto_p (copy_stmt));
2557 if (can_throw || nonlocal_goto)
2559 if (!gsi_end_p (si))
2561 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2562 gsi_next (&si);
2563 if (gsi_end_p (si))
2564 need_debug_cleanup = true;
2566 if (!gsi_end_p (si))
2567 /* Note that bb's predecessor edges aren't necessarily
2568 right at this point; split_block doesn't care. */
2570 edge e = split_block (new_bb, copy_stmt);
2572 new_bb = e->dest;
2573 new_bb->aux = e->src->aux;
2574 si = gsi_start_bb (new_bb);
2578 bool update_probs = false;
2580 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2582 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2583 update_probs = true;
2585 else if (can_throw)
2587 make_eh_edges (copy_stmt);
2588 update_probs = true;
2591 /* EH edges may not match old edges. Copy as much as possible. */
2592 if (update_probs)
2594 edge e;
2595 edge_iterator ei;
2596 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2598 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2599 if ((old_edge->flags & EDGE_EH)
2600 && (e = find_edge (copy_stmt_bb,
2601 (basic_block) old_edge->dest->aux))
2602 && (e->flags & EDGE_EH))
2603 e->probability = old_edge->probability;
2605 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2606 if (e->flags & EDGE_EH)
2608 if (!e->probability.initialized_p ())
2609 e->probability = profile_probability::never ();
2610 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2612 if (id->eh_landing_pad_dest == NULL)
2613 id->eh_landing_pad_dest = e->dest;
2614 else
2615 gcc_assert (id->eh_landing_pad_dest == e->dest);
2621 /* If the call we inline cannot make abnormal goto do not add
2622 additional abnormal edges but only retain those already present
2623 in the original function body. */
2624 if (abnormal_goto_dest == NULL)
2625 nonlocal_goto = false;
2626 if (nonlocal_goto)
2628 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2630 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2631 nonlocal_goto = false;
2632 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2633 in OpenMP regions which aren't allowed to be left abnormally.
2634 So, no need to add abnormal edge in that case. */
2635 else if (is_gimple_call (copy_stmt)
2636 && gimple_call_internal_p (copy_stmt)
2637 && (gimple_call_internal_fn (copy_stmt)
2638 == IFN_ABNORMAL_DISPATCHER)
2639 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2640 nonlocal_goto = false;
2641 else
2642 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2643 EDGE_ABNORMAL);
2646 if ((can_throw || nonlocal_goto)
2647 && gimple_in_ssa_p (cfun))
2648 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2649 can_throw, nonlocal_goto);
2651 return need_debug_cleanup;
2654 /* Copy the PHIs. All blocks and edges are copied, some blocks
2655 were possibly split and new outgoing EH edges inserted.
2656 BB points to the block of the original function and AUX pointers link
2657 the original and newly copied blocks. */
2659 static void
2660 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2662 basic_block const new_bb = (basic_block) bb->aux;
2663 edge_iterator ei;
2664 gphi *phi;
2665 gphi_iterator si;
2666 edge new_edge;
2667 bool inserted = false;
2669 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2671 tree res, new_res;
2672 gphi *new_phi;
2674 phi = si.phi ();
2675 res = PHI_RESULT (phi);
2676 new_res = res;
2677 if (!virtual_operand_p (res))
2679 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2680 if (EDGE_COUNT (new_bb->preds) == 0)
2682 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2683 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2685 else
2687 new_phi = create_phi_node (new_res, new_bb);
2688 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2690 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2691 bb);
2692 tree arg;
2693 tree new_arg;
2694 edge_iterator ei2;
2695 location_t locus;
2697 /* When doing partial cloning, we allow PHIs on the entry
2698 block as long as all the arguments are the same.
2699 Find any input edge to see argument to copy. */
2700 if (!old_edge)
2701 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2702 if (!old_edge->src->aux)
2703 break;
2705 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2706 new_arg = arg;
2707 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2708 gcc_assert (new_arg);
2709 /* With return slot optimization we can end up with
2710 non-gimple (foo *)&this->m, fix that here. */
2711 if (TREE_CODE (new_arg) != SSA_NAME
2712 && TREE_CODE (new_arg) != FUNCTION_DECL
2713 && !is_gimple_val (new_arg))
2715 gimple_seq stmts = NULL;
2716 new_arg = force_gimple_operand (new_arg, &stmts, true,
2717 NULL);
2718 gsi_insert_seq_on_edge (new_edge, stmts);
2719 inserted = true;
2721 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2722 if (id->reset_location)
2723 locus = input_location;
2724 else
2725 locus = remap_location (locus, id);
2726 add_phi_arg (new_phi, new_arg, new_edge, locus);
2732 /* Commit the delayed edge insertions. */
2733 if (inserted)
2734 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2735 gsi_commit_one_edge_insert (new_edge, NULL);
2739 /* Wrapper for remap_decl so it can be used as a callback. */
2741 static tree
2742 remap_decl_1 (tree decl, void *data)
2744 return remap_decl (decl, (copy_body_data *) data);
2747 /* Build struct function and associated datastructures for the new clone
2748 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2749 cfun to the function of NEW_FNDECL (and current_function_decl too). */
2751 static void
2752 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2754 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2756 if (!DECL_ARGUMENTS (new_fndecl))
2757 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2758 if (!DECL_RESULT (new_fndecl))
2759 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2761 /* Register specific tree functions. */
2762 gimple_register_cfg_hooks ();
2764 /* Get clean struct function. */
2765 push_struct_function (new_fndecl);
2767 /* We will rebuild these, so just sanity check that they are empty. */
2768 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2769 gcc_assert (cfun->local_decls == NULL);
2770 gcc_assert (cfun->cfg == NULL);
2771 gcc_assert (cfun->decl == new_fndecl);
2773 /* Copy items we preserve during cloning. */
2774 cfun->static_chain_decl = src_cfun->static_chain_decl;
2775 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2776 cfun->function_end_locus = src_cfun->function_end_locus;
2777 cfun->curr_properties = src_cfun->curr_properties;
2778 cfun->last_verified = src_cfun->last_verified;
2779 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2780 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2781 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2782 cfun->calls_eh_return = src_cfun->calls_eh_return;
2783 cfun->stdarg = src_cfun->stdarg;
2784 cfun->after_inlining = src_cfun->after_inlining;
2785 cfun->can_throw_non_call_exceptions
2786 = src_cfun->can_throw_non_call_exceptions;
2787 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2788 cfun->returns_struct = src_cfun->returns_struct;
2789 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2791 init_empty_tree_cfg ();
2793 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2795 profile_count num = count;
2796 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2797 profile_count::adjust_for_ipa_scaling (&num, &den);
2799 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2800 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2801 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2802 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2803 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2804 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2805 if (src_cfun->eh)
2806 init_eh_for_function ();
2808 if (src_cfun->gimple_df)
2810 init_tree_ssa (cfun);
2811 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2812 if (cfun->gimple_df->in_ssa_p)
2813 init_ssa_operands (cfun);
2817 /* Helper function for copy_cfg_body. Move debug stmts from the end
2818 of NEW_BB to the beginning of successor basic blocks when needed. If the
2819 successor has multiple predecessors, reset them, otherwise keep
2820 their value. */
2822 static void
2823 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2825 edge e;
2826 edge_iterator ei;
2827 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2829 if (gsi_end_p (si)
2830 || gsi_one_before_end_p (si)
2831 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2832 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2833 return;
2835 FOR_EACH_EDGE (e, ei, new_bb->succs)
2837 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2838 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2839 while (is_gimple_debug (gsi_stmt (ssi)))
2841 gimple *stmt = gsi_stmt (ssi);
2842 gdebug *new_stmt;
2843 tree var;
2844 tree value;
2846 /* For the last edge move the debug stmts instead of copying
2847 them. */
2848 if (ei_one_before_end_p (ei))
2850 si = ssi;
2851 gsi_prev (&ssi);
2852 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2854 gimple_debug_bind_reset_value (stmt);
2855 gimple_set_location (stmt, UNKNOWN_LOCATION);
2857 gsi_remove (&si, false);
2858 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2859 continue;
2862 if (gimple_debug_bind_p (stmt))
2864 var = gimple_debug_bind_get_var (stmt);
2865 if (single_pred_p (e->dest))
2867 value = gimple_debug_bind_get_value (stmt);
2868 value = unshare_expr (value);
2869 new_stmt = gimple_build_debug_bind (var, value, stmt);
2871 else
2872 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2874 else if (gimple_debug_source_bind_p (stmt))
2876 var = gimple_debug_source_bind_get_var (stmt);
2877 value = gimple_debug_source_bind_get_value (stmt);
2878 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2880 else if (gimple_debug_nonbind_marker_p (stmt))
2881 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2882 else
2883 gcc_unreachable ();
2884 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2885 id->debug_stmts.safe_push (new_stmt);
2886 gsi_prev (&ssi);
2891 /* Make a copy of the sub-loops of SRC_PARENT and place them
2892 as siblings of DEST_PARENT. */
2894 static void
2895 copy_loops (copy_body_data *id,
2896 class loop *dest_parent, class loop *src_parent)
2898 class loop *src_loop = src_parent->inner;
2899 while (src_loop)
2901 if (!id->blocks_to_copy
2902 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2904 class loop *dest_loop = alloc_loop ();
2906 /* Assign the new loop its header and latch and associate
2907 those with the new loop. */
2908 dest_loop->header = (basic_block)src_loop->header->aux;
2909 dest_loop->header->loop_father = dest_loop;
2910 if (src_loop->latch != NULL)
2912 dest_loop->latch = (basic_block)src_loop->latch->aux;
2913 dest_loop->latch->loop_father = dest_loop;
2916 /* Copy loop meta-data. */
2917 copy_loop_info (src_loop, dest_loop);
2918 if (dest_loop->unroll)
2919 cfun->has_unroll = true;
2920 if (dest_loop->force_vectorize)
2921 cfun->has_force_vectorize_loops = true;
2922 if (id->src_cfun->last_clique != 0)
2923 dest_loop->owned_clique
2924 = remap_dependence_clique (id,
2925 src_loop->owned_clique
2926 ? src_loop->owned_clique : 1);
2928 /* Finally place it into the loop array and the loop tree. */
2929 place_new_loop (cfun, dest_loop);
2930 flow_loop_tree_node_add (dest_parent, dest_loop);
2932 if (src_loop->simduid)
2934 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2935 cfun->has_simduid_loops = true;
2938 /* Recurse. */
2939 copy_loops (id, dest_loop, src_loop);
2941 src_loop = src_loop->next;
2945 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2947 void
2948 redirect_all_calls (copy_body_data * id, basic_block bb)
2950 gimple_stmt_iterator si;
2951 gimple *last = last_stmt (bb);
2952 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2954 gimple *stmt = gsi_stmt (si);
2955 if (is_gimple_call (stmt))
2957 tree old_lhs = gimple_call_lhs (stmt);
2958 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2959 if (edge)
2961 gimple *new_stmt
2962 = cgraph_edge::redirect_call_stmt_to_callee (edge);
2963 /* If IPA-SRA transformation, run as part of edge redirection,
2964 removed the LHS because it is unused, save it to
2965 killed_new_ssa_names so that we can prune it from debug
2966 statements. */
2967 if (old_lhs
2968 && TREE_CODE (old_lhs) == SSA_NAME
2969 && !gimple_call_lhs (new_stmt))
2971 if (!id->killed_new_ssa_names)
2972 id->killed_new_ssa_names = new hash_set<tree> (16);
2973 id->killed_new_ssa_names->add (old_lhs);
2976 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2977 gimple_purge_dead_eh_edges (bb);
2983 /* Make a copy of the body of FN so that it can be inserted inline in
2984 another function. Walks FN via CFG, returns new fndecl. */
2986 static tree
2987 copy_cfg_body (copy_body_data * id,
2988 basic_block entry_block_map, basic_block exit_block_map,
2989 basic_block new_entry)
2991 tree callee_fndecl = id->src_fn;
2992 /* Original cfun for the callee, doesn't change. */
2993 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2994 struct function *cfun_to_copy;
2995 basic_block bb;
2996 tree new_fndecl = NULL;
2997 bool need_debug_cleanup = false;
2998 int last;
2999 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
3000 profile_count num = entry_block_map->count;
3002 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3004 /* Register specific tree functions. */
3005 gimple_register_cfg_hooks ();
3007 /* If we are inlining just a region of the function, make sure to connect
3008 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
3009 part of a loop, we must compute the frequency and probability of
3010 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
3011 probabilities of edges incoming from nonduplicated region. */
3012 if (new_entry)
3014 edge e;
3015 edge_iterator ei;
3016 den = profile_count::zero ();
3018 FOR_EACH_EDGE (e, ei, new_entry->preds)
3019 if (!e->src->aux)
3020 den += e->count ();
3021 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
3024 profile_count::adjust_for_ipa_scaling (&num, &den);
3026 /* Must have a CFG here at this point. */
3027 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
3028 (DECL_STRUCT_FUNCTION (callee_fndecl)));
3031 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
3032 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3033 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3034 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3036 /* Duplicate any exception-handling regions. */
3037 if (cfun->eh)
3038 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3039 remap_decl_1, id);
3041 /* Use aux pointers to map the original blocks to copy. */
3042 FOR_EACH_BB_FN (bb, cfun_to_copy)
3043 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3045 basic_block new_bb = copy_bb (id, bb, num, den);
3046 bb->aux = new_bb;
3047 new_bb->aux = bb;
3048 new_bb->loop_father = entry_block_map->loop_father;
3051 last = last_basic_block_for_fn (cfun);
3053 /* Now that we've duplicated the blocks, duplicate their edges. */
3054 basic_block abnormal_goto_dest = NULL;
3055 if (id->call_stmt
3056 && stmt_can_make_abnormal_goto (id->call_stmt))
3058 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3060 bb = gimple_bb (id->call_stmt);
3061 gsi_next (&gsi);
3062 if (gsi_end_p (gsi))
3063 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3065 FOR_ALL_BB_FN (bb, cfun_to_copy)
3066 if (!id->blocks_to_copy
3067 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3068 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3069 abnormal_goto_dest, id);
3071 if (id->eh_landing_pad_dest)
3073 add_clobbers_to_eh_landing_pad (id);
3074 id->eh_landing_pad_dest = NULL;
3077 if (new_entry)
3079 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3080 EDGE_FALLTHRU);
3081 e->probability = profile_probability::always ();
3084 /* Duplicate the loop tree, if available and wanted. */
3085 if (loops_for_fn (src_cfun) != NULL
3086 && current_loops != NULL)
3088 copy_loops (id, entry_block_map->loop_father,
3089 get_loop (src_cfun, 0));
3090 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
3091 loops_state_set (LOOPS_NEED_FIXUP);
3094 /* If the loop tree in the source function needed fixup, mark the
3095 destination loop tree for fixup, too. */
3096 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3097 loops_state_set (LOOPS_NEED_FIXUP);
3099 if (gimple_in_ssa_p (cfun))
3100 FOR_ALL_BB_FN (bb, cfun_to_copy)
3101 if (!id->blocks_to_copy
3102 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3103 copy_phis_for_bb (bb, id);
3105 FOR_ALL_BB_FN (bb, cfun_to_copy)
3106 if (bb->aux)
3108 if (need_debug_cleanup
3109 && bb->index != ENTRY_BLOCK
3110 && bb->index != EXIT_BLOCK)
3111 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3112 /* Update call edge destinations. This cannot be done before loop
3113 info is updated, because we may split basic blocks. */
3114 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3115 && bb->index != ENTRY_BLOCK
3116 && bb->index != EXIT_BLOCK)
3117 redirect_all_calls (id, (basic_block)bb->aux);
3118 ((basic_block)bb->aux)->aux = NULL;
3119 bb->aux = NULL;
3122 /* Zero out AUX fields of newly created block during EH edge
3123 insertion. */
3124 for (; last < last_basic_block_for_fn (cfun); last++)
3126 if (need_debug_cleanup)
3127 maybe_move_debug_stmts_to_successors (id,
3128 BASIC_BLOCK_FOR_FN (cfun, last));
3129 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3130 /* Update call edge destinations. This cannot be done before loop
3131 info is updated, because we may split basic blocks. */
3132 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3133 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3135 entry_block_map->aux = NULL;
3136 exit_block_map->aux = NULL;
3138 if (id->eh_map)
3140 delete id->eh_map;
3141 id->eh_map = NULL;
3143 if (id->dependence_map)
3145 delete id->dependence_map;
3146 id->dependence_map = NULL;
3149 return new_fndecl;
3152 /* Copy the debug STMT using ID. We deal with these statements in a
3153 special way: if any variable in their VALUE expression wasn't
3154 remapped yet, we won't remap it, because that would get decl uids
3155 out of sync, causing codegen differences between -g and -g0. If
3156 this arises, we drop the VALUE expression altogether. */
3158 static void
3159 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3161 tree t, *n;
3162 struct walk_stmt_info wi;
3164 if (tree block = gimple_block (stmt))
3166 n = id->decl_map->get (block);
3167 gimple_set_block (stmt, n ? *n : id->block);
3170 if (gimple_debug_nonbind_marker_p (stmt))
3172 if (id->call_stmt && !gimple_block (stmt))
3174 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3175 gsi_remove (&gsi, true);
3177 return;
3180 /* Remap all the operands in STMT. */
3181 memset (&wi, 0, sizeof (wi));
3182 wi.info = id;
3184 processing_debug_stmt = 1;
3186 if (gimple_debug_source_bind_p (stmt))
3187 t = gimple_debug_source_bind_get_var (stmt);
3188 else if (gimple_debug_bind_p (stmt))
3189 t = gimple_debug_bind_get_var (stmt);
3190 else
3191 gcc_unreachable ();
3193 if (TREE_CODE (t) == PARM_DECL
3194 && id->debug_map
3195 && (n = id->debug_map->get (t)))
3197 gcc_assert (VAR_P (*n));
3198 t = *n;
3200 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3201 /* T is a non-localized variable. */;
3202 else
3203 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3205 if (gimple_debug_bind_p (stmt))
3207 gimple_debug_bind_set_var (stmt, t);
3209 if (gimple_debug_bind_has_value_p (stmt))
3210 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3211 remap_gimple_op_r, &wi, NULL);
3213 /* Punt if any decl couldn't be remapped. */
3214 if (processing_debug_stmt < 0)
3215 gimple_debug_bind_reset_value (stmt);
3217 else if (gimple_debug_source_bind_p (stmt))
3219 gimple_debug_source_bind_set_var (stmt, t);
3220 /* When inlining, if the source bind refers to one of the optimized
3221 away parameters, change the source bind into a normal debug bind
3222 referring to the corresponding DEBUG_EXPR_DECL that should have
3223 been bound before the call stmt. */
3224 t = gimple_debug_source_bind_get_value (stmt);
3225 if (t != NULL_TREE
3226 && TREE_CODE (t) == PARM_DECL
3227 && id->call_stmt)
3229 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3230 unsigned int i;
3231 if (debug_args != NULL)
3233 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3234 if ((**debug_args)[i] == DECL_ORIGIN (t)
3235 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3237 t = (**debug_args)[i + 1];
3238 stmt->subcode = GIMPLE_DEBUG_BIND;
3239 gimple_debug_bind_set_value (stmt, t);
3240 break;
3244 if (gimple_debug_source_bind_p (stmt))
3245 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3246 remap_gimple_op_r, &wi, NULL);
3249 processing_debug_stmt = 0;
3251 update_stmt (stmt);
3254 /* Process deferred debug stmts. In order to give values better odds
3255 of being successfully remapped, we delay the processing of debug
3256 stmts until all other stmts that might require remapping are
3257 processed. */
3259 static void
3260 copy_debug_stmts (copy_body_data *id)
3262 size_t i;
3263 gdebug *stmt;
3265 if (!id->debug_stmts.exists ())
3266 return;
3268 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3269 copy_debug_stmt (stmt, id);
3271 id->debug_stmts.release ();
3274 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3275 another function. */
3277 static tree
3278 copy_tree_body (copy_body_data *id)
3280 tree fndecl = id->src_fn;
3281 tree body = DECL_SAVED_TREE (fndecl);
3283 walk_tree (&body, copy_tree_body_r, id, NULL);
3285 return body;
3288 /* Make a copy of the body of FN so that it can be inserted inline in
3289 another function. */
3291 static tree
3292 copy_body (copy_body_data *id,
3293 basic_block entry_block_map, basic_block exit_block_map,
3294 basic_block new_entry)
3296 tree fndecl = id->src_fn;
3297 tree body;
3299 /* If this body has a CFG, walk CFG and copy. */
3300 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3301 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3302 new_entry);
3303 copy_debug_stmts (id);
3304 delete id->killed_new_ssa_names;
3305 id->killed_new_ssa_names = NULL;
3307 return body;
3310 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3311 defined in function FN, or of a data member thereof. */
3313 static bool
3314 self_inlining_addr_expr (tree value, tree fn)
3316 tree var;
3318 if (TREE_CODE (value) != ADDR_EXPR)
3319 return false;
3321 var = get_base_address (TREE_OPERAND (value, 0));
3323 return var && auto_var_in_fn_p (var, fn);
3326 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3327 lexical block and line number information from base_stmt, if given,
3328 or from the last stmt of the block otherwise. */
3330 static gimple *
3331 insert_init_debug_bind (copy_body_data *id,
3332 basic_block bb, tree var, tree value,
3333 gimple *base_stmt)
3335 gimple *note;
3336 gimple_stmt_iterator gsi;
3337 tree tracked_var;
3339 if (!gimple_in_ssa_p (id->src_cfun))
3340 return NULL;
3342 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3343 return NULL;
3345 tracked_var = target_for_debug_bind (var);
3346 if (!tracked_var)
3347 return NULL;
3349 if (bb)
3351 gsi = gsi_last_bb (bb);
3352 if (!base_stmt && !gsi_end_p (gsi))
3353 base_stmt = gsi_stmt (gsi);
3356 note = gimple_build_debug_bind (tracked_var,
3357 value == error_mark_node
3358 ? NULL_TREE : unshare_expr (value),
3359 base_stmt);
3361 if (bb)
3363 if (!gsi_end_p (gsi))
3364 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3365 else
3366 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3369 return note;
3372 static void
3373 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3375 /* If VAR represents a zero-sized variable, it's possible that the
3376 assignment statement may result in no gimple statements. */
3377 if (init_stmt)
3379 gimple_stmt_iterator si = gsi_last_bb (bb);
3381 /* We can end up with init statements that store to a non-register
3382 from a rhs with a conversion. Handle that here by forcing the
3383 rhs into a temporary. gimple_regimplify_operands is not
3384 prepared to do this for us. */
3385 if (!is_gimple_debug (init_stmt)
3386 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3387 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3388 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3390 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3391 gimple_expr_type (init_stmt),
3392 gimple_assign_rhs1 (init_stmt));
3393 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3394 GSI_NEW_STMT);
3395 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3396 gimple_assign_set_rhs1 (init_stmt, rhs);
3398 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3399 if (!is_gimple_debug (init_stmt))
3401 gimple_regimplify_operands (init_stmt, &si);
3403 tree def = gimple_assign_lhs (init_stmt);
3404 insert_init_debug_bind (id, bb, def, def, init_stmt);
3409 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3410 if need be (which should only be necessary for invalid programs). Attempt
3411 to convert VALUE to TYPE and return the result if it is possible; just return
3412 a zero constant of the given type if it fails. */
3414 tree
3415 force_value_to_type (tree type, tree value)
3417 /* If we can match up types by promotion/demotion do so. */
3418 if (fold_convertible_p (type, value))
3419 return fold_convert (type, value);
3421 /* ??? For valid programs we should not end up here.
3422 Still if we end up with truly mismatched types here, fall back
3423 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3424 GIMPLE to the following passes. */
3425 if (TREE_CODE (value) == WITH_SIZE_EXPR)
3426 return error_mark_node;
3427 else if (!is_gimple_reg_type (TREE_TYPE (value))
3428 || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3429 return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3430 else
3431 return build_zero_cst (type);
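/* For illustration, a sketch of the invalid-program situation this
   handles; the two translation units and names below are made up:

     // tu1.c
     int impl (int x) { return x + 1; }

     // tu2.c
     extern long impl ();             // mismatched declaration
     long use (long v) { return impl (v); }

   If such a call is ever inlined into the real impl (e.g. with LTO),
   the actual argument has type long while the formal parameter has
   type int; fold_convert covers promotions/demotions like this, and
   truly incompatible cases fall back to the VIEW_CONVERT_EXPR or the
   literal zero above. */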
3434 /* Initialize parameter P with VALUE. If needed, produce init statement
3435 at the end of BB. When BB is NULL, we return init statement to be
3436 output later. */
3437 static gimple *
3438 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3439 basic_block bb, tree *vars)
3441 gimple *init_stmt = NULL;
3442 tree var;
3443 tree def = (gimple_in_ssa_p (cfun)
3444 ? ssa_default_def (id->src_cfun, p) : NULL);
3446 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3447 here since the type of this decl must be visible to the calling
3448 function. */
3449 var = copy_decl_to_var (p, id);
3451 /* Declare this new variable. */
3452 DECL_CHAIN (var) = *vars;
3453 *vars = var;
3455 /* Make gimplifier happy about this variable. */
3456 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3458 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3459 we would not need to create a new variable here at all, if it
3460 weren't for debug info. Still, we can just use the argument
3461 value. */
3462 if (TREE_READONLY (p)
3463 && !TREE_ADDRESSABLE (p)
3464 && value
3465 && !TREE_SIDE_EFFECTS (value)
3466 && !def)
3468 /* We may produce non-gimple trees by adding NOPs or introduce invalid
3469 sharing when the value is not constant or DECL. And we need to make
3470 sure that it cannot be modified from another path in the callee. */
3471 if ((is_gimple_min_invariant (value)
3472 || (DECL_P (value) && TREE_READONLY (value))
3473 || (auto_var_in_fn_p (value, id->dst_fn)
3474 && !TREE_ADDRESSABLE (value)))
3475 && useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value))
3476 /* We have to be very careful about ADDR_EXPR. Make sure
3477 the base variable isn't a local variable of the inlined
3478 function, e.g., when doing recursive inlining, direct or
3479 mutually-recursive or whatever, which is why we don't
3480 just test whether fn == current_function_decl. */
3481 && ! self_inlining_addr_expr (value, fn))
3483 insert_decl_map (id, p, value);
3484 if (!id->debug_map)
3485 id->debug_map = new hash_map<tree, tree>;
3486 id->debug_map->put (p, var);
3487 return insert_init_debug_bind (id, bb, var, value, NULL);
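/* An illustrative sketch of the kind of argument the
   self_inlining_addr_expr guard above rejects; the function below is
   invented:

     static int walk (int *const prev, int n)
     {
       int local = n;
       if (n > 0)
         return local + walk (&local, n - 1);   // recursive call
       return prev ? *prev : 0;
     }

   When the recursive call is inlined into walk itself, the actual
   argument &local is an ADDR_EXPR of an automatic variable of the very
   function being inlined, so substituting it directly for PREV could
   end up referring to the inlined copy's remapped variable rather than
   the caller's; an initialized VAR_DECL is used instead. */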
3491 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3492 that way, when the PARM_DECL is encountered, it will be
3493 automatically replaced by the VAR_DECL. */
3494 insert_decl_map (id, p, var);
3496 /* Even if P was TREE_READONLY, the new VAR should not be.
3497 In the original code, we would have constructed a
3498 temporary, and then the function body would have never
3499 changed the value of P. However, now, we will be
3500 constructing VAR directly. The constructor body may
3501 change its value multiple times as it is being
3502 constructed. Therefore, it must not be TREE_READONLY;
3503 the back-end assumes that TREE_READONLY variable is
3504 assigned to only once. */
3505 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3506 TREE_READONLY (var) = 0;
3508 tree rhs = value;
3509 if (value
3510 && value != error_mark_node
3511 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3512 rhs = force_value_to_type (TREE_TYPE (p), value);
3514 /* If there is no setup required and we are in SSA, take the easy route
3515 replacing all SSA names representing the function parameter by the
3516 SSA name passed to the function.
3518 We need to construct a map for the variable anyway, as it might be used
3519 in different SSA names when the parameter is set in the function.
3521 Do the replacement at -O0 for const arguments replaced by a constant.
3522 This is important for builtin_constant_p and other constructs requiring
3523 a constant argument to be visible in the inlined function body. */
3524 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3525 && (optimize
3526 || (TREE_READONLY (p)
3527 && is_gimple_min_invariant (rhs)))
3528 && (TREE_CODE (rhs) == SSA_NAME
3529 || is_gimple_min_invariant (rhs))
3530 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3532 insert_decl_map (id, def, rhs);
3533 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3536 /* If the value of the argument is never used, don't bother initializing
3537 it. */
3538 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3540 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3541 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3544 /* Initialize this VAR_DECL from the equivalent argument. Convert
3545 the argument to the proper type in case it was promoted. */
3546 if (value)
3548 if (rhs == error_mark_node)
3550 insert_decl_map (id, p, var);
3551 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3554 STRIP_USELESS_TYPE_CONVERSION (rhs);
3556 /* If we are in SSA form properly remap the default definition
3557 or assign to a dummy SSA name if the parameter is unused and
3558 we are not optimizing. */
3559 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3561 if (def)
3563 def = remap_ssa_name (def, id);
3564 init_stmt = gimple_build_assign (def, rhs);
3565 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3566 set_ssa_default_def (cfun, var, NULL);
3568 else if (!optimize)
3570 def = make_ssa_name (var);
3571 init_stmt = gimple_build_assign (def, rhs);
3574 else
3575 init_stmt = gimple_build_assign (var, rhs);
3577 if (bb && init_stmt)
3578 insert_init_stmt (id, bb, init_stmt);
3580 return init_stmt;
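/* To make the parameter mapping concrete, a rough source-level sketch
   with invented names (the real transformation happens on GIMPLE):

     static inline int twice (int x) { return x + x; }
     int caller (int a) { return twice (a + 1); }

   After inlining, the body of caller behaves roughly like

     int caller (int a)
     {
       int x_in = a + 1;     // VAR_DECL created for the PARM_DECL x
       return x_in + x_in;
     }

   where x_in is either initialized by an explicit assignment as above
   or, in SSA form with a constant or SSA-name argument, substituted
   directly for x's default definition. */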
3583 /* Generate code to initialize the parameters of the function at the
3584 top of the stack in ID from the GIMPLE_CALL STMT. */
3586 static void
3587 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3588 tree fn, basic_block bb)
3590 tree parms;
3591 size_t i;
3592 tree p;
3593 tree vars = NULL_TREE;
3594 tree static_chain = gimple_call_chain (stmt);
3596 /* Figure out what the parameters are. */
3597 parms = DECL_ARGUMENTS (fn);
3599 /* Loop through the parameter declarations, replacing each with an
3600 equivalent VAR_DECL, appropriately initialized. */
3601 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3603 tree val;
3604 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3605 setup_one_parameter (id, p, val, fn, bb, &vars);
3607 /* After remapping parameters remap their types. This has to be done
3608 in a second loop over all parameters to appropriately remap
3609 variable sized arrays when the size is specified in a
3610 parameter following the array. */
3611 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3613 tree *varp = id->decl_map->get (p);
3614 if (varp && VAR_P (*varp))
3616 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3617 ? ssa_default_def (id->src_cfun, p) : NULL);
3618 tree var = *varp;
3619 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3620 /* Also remap the default definition if it was remapped
3621 to the default definition of the parameter replacement
3622 by the parameter setup. */
3623 if (def)
3625 tree *defp = id->decl_map->get (def);
3626 if (defp
3627 && TREE_CODE (*defp) == SSA_NAME
3628 && SSA_NAME_VAR (*defp) == var)
3629 TREE_TYPE (*defp) = TREE_TYPE (var);
3634 /* Initialize the static chain. */
3635 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3636 gcc_assert (fn != current_function_decl);
3637 if (p)
3639 /* No static chain? Seems like a bug in tree-nested.c. */
3640 gcc_assert (static_chain);
3642 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3645 declare_inline_vars (id->block, vars);
3649 /* Declare a return variable to replace the RESULT_DECL for the
3650 function we are calling. An appropriate DECL_STMT is returned.
3651 The USE_STMT is filled to contain a use of the declaration to
3652 indicate the return value of the function.
3654 RETURN_SLOT, if non-null, is the place where the result is stored. It
3655 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3656 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3658 The return value is a (possibly null) value that holds the result
3659 as seen by the caller. */
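/* Illustration (assumed call shapes, not taken from the sources): for
   'a = foo ();' MODIFY_DEST is A and RETURN_SLOT is NULL; if the call has
   CALL_EXPR_RETURN_SLOT_OPT set, A is passed as RETURN_SLOT instead and
   MODIFY_DEST is NULL; for a call 'foo ();' with no LHS both are NULL and
   the declared variable merely absorbs the returned value.  */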
3661 static tree
3662 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3663 basic_block entry_bb)
3665 tree callee = id->src_fn;
3666 tree result = DECL_RESULT (callee);
3667 tree callee_type = TREE_TYPE (result);
3668 tree caller_type;
3669 tree var, use;
3671 /* Handle type-mismatches in the function declaration return type
3672 vs. the call expression. */
3673 if (modify_dest)
3674 caller_type = TREE_TYPE (modify_dest);
3675 else if (return_slot)
3676 caller_type = TREE_TYPE (return_slot);
3677 else /* No LHS on the call. */
3678 caller_type = TREE_TYPE (TREE_TYPE (callee));
3680 /* We don't need to do anything for functions that don't return anything. */
3681 if (VOID_TYPE_P (callee_type))
3682 return NULL_TREE;
3684 /* If there was a return slot, then the return value is the
3685 dereferenced address of that object. */
3686 if (return_slot)
3688 /* The front end shouldn't have used both return_slot and
3689 a modify expression. */
3690 gcc_assert (!modify_dest);
3691 if (DECL_BY_REFERENCE (result))
3693 tree return_slot_addr = build_fold_addr_expr (return_slot);
3694 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3696 /* We are going to construct *&return_slot and we can't do that
3697 for variables believed to be not addressable.
3699 FIXME: This check possibly can match, because values returned
3700 via return slot optimization are not believed to have their address
3701 taken by alias analysis. */
3702 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3703 var = return_slot_addr;
3704 mark_addressable (return_slot);
3706 else
3708 var = return_slot;
3709 gcc_assert (TREE_CODE (var) != SSA_NAME);
3710 if (TREE_ADDRESSABLE (result))
3711 mark_addressable (var);
3713 if (DECL_NOT_GIMPLE_REG_P (result)
3714 && DECL_P (var))
3715 DECL_NOT_GIMPLE_REG_P (var) = 1;
3717 if (!useless_type_conversion_p (callee_type, caller_type))
3718 var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3720 use = NULL;
3721 goto done;
3724 /* All types requiring non-trivial constructors should have been handled. */
3725 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3727 /* Attempt to avoid creating a new temporary variable. */
3728 if (modify_dest
3729 && TREE_CODE (modify_dest) != SSA_NAME)
3731 bool use_it = false;
3733 /* We can't use MODIFY_DEST if there's type promotion involved. */
3734 if (!useless_type_conversion_p (callee_type, caller_type))
3735 use_it = false;
3737 /* ??? If we're assigning to a variable sized type, then we must
3738 reuse the destination variable, because we've no good way to
3739 create variable sized temporaries at this point. */
3740 else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3741 use_it = true;
3743 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3744 reuse it as the result of the call directly. Don't do this if
3745 it would promote MODIFY_DEST to addressable. */
3746 else if (TREE_ADDRESSABLE (result))
3747 use_it = false;
3748 else
3750 tree base_m = get_base_address (modify_dest);
3752 /* If the base isn't a decl, then it's a pointer, and we don't
3753 know where that's going to go. */
3754 if (!DECL_P (base_m))
3755 use_it = false;
3756 else if (is_global_var (base_m))
3757 use_it = false;
3758 else if (DECL_NOT_GIMPLE_REG_P (result)
3759 && !DECL_NOT_GIMPLE_REG_P (base_m))
3760 use_it = false;
3761 else if (!TREE_ADDRESSABLE (base_m))
3762 use_it = true;
3765 if (use_it)
3767 var = modify_dest;
3768 use = NULL;
3769 goto done;
3773 gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3775 var = copy_result_decl_to_var (result, id);
3776 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3778 /* Do not have the rest of GCC warn about this variable as it should
3779 not be visible to the user. */
3780 TREE_NO_WARNING (var) = 1;
3782 declare_inline_vars (id->block, var);
3784 /* Build the use expr. If the return type of the function was
3785 promoted, convert it back to the expected type. */
3786 use = var;
3787 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3789 /* If we can match up types by promotion/demotion do so. */
3790 if (fold_convertible_p (caller_type, var))
3791 use = fold_convert (caller_type, var);
3792 else
3794 /* ??? For valid programs we should not end up here.
3795 Still if we end up with truly mismatched types here, fall back
3796 to using a MEM_REF to not leak invalid GIMPLE to the following
3797 passes. */
3798 /* Prevent var from being written into SSA form. */
3799 if (is_gimple_reg_type (TREE_TYPE (var)))
3800 DECL_NOT_GIMPLE_REG_P (var) = true;
3801 use = fold_build2 (MEM_REF, caller_type,
3802 build_fold_addr_expr (var),
3803 build_int_cst (ptr_type_node, 0));
3807 STRIP_USELESS_TYPE_CONVERSION (use);
3809 if (DECL_BY_REFERENCE (result))
3811 TREE_ADDRESSABLE (var) = 1;
3812 var = build_fold_addr_expr (var);
3815 done:
3816 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3817 way, when the RESULT_DECL is encountered, it will be
3818 automatically replaced by the VAR_DECL.
3820 When returning by reference, ensure that RESULT_DECL remaps to
3821 gimple_val. */
3822 if (DECL_BY_REFERENCE (result)
3823 && !is_gimple_val (var))
3825 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3826 insert_decl_map (id, result, temp);
3827 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3828 its default_def SSA_NAME. */
3829 if (gimple_in_ssa_p (id->src_cfun)
3830 && is_gimple_reg (result))
3832 temp = make_ssa_name (temp);
3833 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3835 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3837 else
3838 insert_decl_map (id, result, var);
3840 /* Remember this so we can ignore it in remap_decls. */
3841 id->retvar = var;
3842 return use;
3845 /* Determine if the function can be copied. If so return NULL. If
3846 not return a string describing the reason for failure. */
3848 const char *
3849 copy_forbidden (struct function *fun)
3851 const char *reason = fun->cannot_be_copied_reason;
3853 /* Only examine the function once. */
3854 if (fun->cannot_be_copied_set)
3855 return reason;
3857 /* We cannot copy a function that receives a non-local goto
3858 because we cannot remap the destination label used in the
3859 function that is performing the non-local goto. */
3860 /* ??? Actually, this should be possible, if we work at it.
3861 No doubt there's just a handful of places that simply
3862 assume it doesn't happen and don't substitute properly. */
3863 if (fun->has_nonlocal_label)
3865 reason = G_("function %q+F can never be copied "
3866 "because it receives a non-local goto");
3867 goto fail;
3870 if (fun->has_forced_label_in_static)
3872 reason = G_("function %q+F can never be copied because it saves "
3873 "address of local label in a static variable");
3874 goto fail;
3877 fail:
3878 fun->cannot_be_copied_reason = reason;
3879 fun->cannot_be_copied_set = true;
3880 return reason;
3884 static const char *inline_forbidden_reason;
3886 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3887 iff a function cannot be inlined. Also sets the reason why. */
3889 static tree
3890 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3891 struct walk_stmt_info *wip)
3893 tree fn = (tree) wip->info;
3894 tree t;
3895 gimple *stmt = gsi_stmt (*gsi);
3897 switch (gimple_code (stmt))
3899 case GIMPLE_CALL:
3900 /* Refuse to inline an alloca call unless the user explicitly forced it,
3901 as this may change the program's memory overhead drastically when the
3902 function using alloca is called in a loop. In the GCC included in
3903 SPEC2000, inlining into schedule_block caused it to require 2GB of
3904 RAM instead of 256MB. Don't do so for alloca calls emitted for
3905 VLA objects as those can't cause unbounded growth (they're always
3906 wrapped inside stack_save/stack_restore regions). */
3907 if (gimple_maybe_alloca_call_p (stmt)
3908 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3909 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3911 inline_forbidden_reason
3912 = G_("function %q+F can never be inlined because it uses "
3913 "alloca (override using the always_inline attribute)");
3914 *handled_ops_p = true;
3915 return fn;
3918 t = gimple_call_fndecl (stmt);
3919 if (t == NULL_TREE)
3920 break;
3922 /* We cannot inline functions that call setjmp. */
3923 if (setjmp_call_p (t))
3925 inline_forbidden_reason
3926 = G_("function %q+F can never be inlined because it uses setjmp");
3927 *handled_ops_p = true;
3928 return t;
3931 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3932 switch (DECL_FUNCTION_CODE (t))
3934 /* We cannot inline functions that take a variable number of
3935 arguments. */
3936 case BUILT_IN_VA_START:
3937 case BUILT_IN_NEXT_ARG:
3938 case BUILT_IN_VA_END:
3939 inline_forbidden_reason
3940 = G_("function %q+F can never be inlined because it "
3941 "uses variable argument lists");
3942 *handled_ops_p = true;
3943 return t;
3945 case BUILT_IN_LONGJMP:
3946 /* We can't inline functions that call __builtin_longjmp at
3947 all. The non-local goto machinery really requires the
3948 destination be in a different function. If we allow the
3949 function calling __builtin_longjmp to be inlined into the
3950 function calling __builtin_setjmp, Things will Go Awry. */
3951 inline_forbidden_reason
3952 = G_("function %q+F can never be inlined because "
3953 "it uses setjmp-longjmp exception handling");
3954 *handled_ops_p = true;
3955 return t;
3957 case BUILT_IN_NONLOCAL_GOTO:
3958 /* Similarly. */
3959 inline_forbidden_reason
3960 = G_("function %q+F can never be inlined because "
3961 "it uses non-local goto");
3962 *handled_ops_p = true;
3963 return t;
3965 case BUILT_IN_RETURN:
3966 case BUILT_IN_APPLY_ARGS:
3967 /* If a __builtin_apply_args caller would be inlined,
3968 it would be saving arguments of the function it has
3969 been inlined into. Similarly __builtin_return would
3970 return from the function the inline has been inlined into. */
3971 inline_forbidden_reason
3972 = G_("function %q+F can never be inlined because "
3973 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3974 *handled_ops_p = true;
3975 return t;
3977 default:
3978 break;
3980 break;
3982 case GIMPLE_GOTO:
3983 t = gimple_goto_dest (stmt);
3985 /* We will not inline a function which uses computed goto. The
3986 addresses of its local labels, which may be tucked into
3987 global storage, are of course not constant across
3988 instantiations, which causes unexpected behavior. */
3989 if (TREE_CODE (t) != LABEL_DECL)
3991 inline_forbidden_reason
3992 = G_("function %q+F can never be inlined "
3993 "because it contains a computed goto");
3994 *handled_ops_p = true;
3995 return t;
3997 break;
3999 default:
4000 break;
4003 *handled_ops_p = false;
4004 return NULL_TREE;
4007 /* Return true if FNDECL is a function that cannot be inlined into
4008 another one. */
4010 static bool
4011 inline_forbidden_p (tree fndecl)
4013 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
4014 struct walk_stmt_info wi;
4015 basic_block bb;
4016 bool forbidden_p = false;
4018 /* First check for shared reasons not to copy the code. */
4019 inline_forbidden_reason = copy_forbidden (fun);
4020 if (inline_forbidden_reason != NULL)
4021 return true;
4023 /* Next, walk the statements of the function looking for
4024 constructs we can't handle, or that are non-optimal for inlining. */
4025 hash_set<tree> visited_nodes;
4026 memset (&wi, 0, sizeof (wi));
4027 wi.info = (void *) fndecl;
4028 wi.pset = &visited_nodes;
4030 /* We cannot inline a function with a variable-sized parameter because we
4031 cannot materialize a temporary of such a type in the caller if need be.
4032 Note that the return case is not symmetrical because we can guarantee
4033 that a temporary is not needed by means of CALL_EXPR_RETURN_SLOT_OPT. */
4034 for (tree parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
4035 if (!poly_int_tree_p (DECL_SIZE (parm)))
4037 inline_forbidden_reason
4038 = G_("function %q+F can never be inlined because "
4039 "it has a VLA argument");
4040 return true;
4043 FOR_EACH_BB_FN (bb, fun)
4045 gimple *ret;
4046 gimple_seq seq = bb_seq (bb);
4047 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
4048 forbidden_p = (ret != NULL);
4049 if (forbidden_p)
4050 break;
4053 return forbidden_p;
4056 /* Return false if the function FNDECL cannot be inlined on account of its
4057 attributes, true otherwise. */
4058 static bool
4059 function_attribute_inlinable_p (const_tree fndecl)
4061 if (targetm.attribute_table)
4063 const_tree a;
4065 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
4067 const_tree name = get_attribute_name (a);
4068 int i;
4070 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
4071 if (is_attribute_p (targetm.attribute_table[i].name, name))
4072 return targetm.function_attribute_inlinable_p (fndecl);
4076 return true;
4079 /* Returns nonzero if FN is a function that does not have any
4080 fundamental inline blocking properties. */
4082 bool
4083 tree_inlinable_function_p (tree fn)
4085 bool inlinable = true;
4086 bool do_warning;
4087 tree always_inline;
4089 /* If we've already decided this function shouldn't be inlined,
4090 there's no need to check again. */
4091 if (DECL_UNINLINABLE (fn))
4092 return false;
4094 /* We only warn for functions declared `inline' by the user. */
4095 do_warning = (opt_for_fn (fn, warn_inline)
4096 && DECL_DECLARED_INLINE_P (fn)
4097 && !DECL_NO_INLINE_WARNING_P (fn)
4098 && !DECL_IN_SYSTEM_HEADER (fn));
4100 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4102 if (flag_no_inline
4103 && always_inline == NULL)
4105 if (do_warning)
4106 warning (OPT_Winline, "function %q+F can never be inlined because it "
4107 "is suppressed using %<-fno-inline%>", fn);
4108 inlinable = false;
4111 else if (!function_attribute_inlinable_p (fn))
4113 if (do_warning)
4114 warning (OPT_Winline, "function %q+F can never be inlined because it "
4115 "uses attributes conflicting with inlining", fn);
4116 inlinable = false;
4119 else if (inline_forbidden_p (fn))
4121 /* See if we should warn about uninlinable functions. Previously,
4122 some of these warnings would be issued while trying to expand
4123 the function inline, but that would cause multiple warnings
4124 about functions that would for example call alloca. But since
4125 this is a property of the function, just one warning is enough.
4126 As a bonus we can now give more details about the reason why a
4127 function is not inlinable. */
4128 if (always_inline)
4129 error (inline_forbidden_reason, fn);
4130 else if (do_warning)
4131 warning (OPT_Winline, inline_forbidden_reason, fn);
4133 inlinable = false;
4136 /* Squirrel away the result so that we don't have to check again. */
4137 DECL_UNINLINABLE (fn) = !inlinable;
4139 return inlinable;
4142 /* Estimate the cost of a memory move of type TYPE. Use machine dependent
4143 word size and take possible memcpy call into account and return
4144 cost based on whether optimizing for size or speed according to SPEED_P. */
4147 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4149 HOST_WIDE_INT size;
4151 gcc_assert (!VOID_TYPE_P (type));
4153 if (TREE_CODE (type) == VECTOR_TYPE)
4155 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4156 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4157 int orig_mode_size
4158 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4159 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4160 return ((orig_mode_size + simd_mode_size - 1)
4161 / simd_mode_size);
4164 size = int_size_in_bytes (type);
4166 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4167 /* Cost of a memcpy call, 3 arguments and the call. */
4168 return 4;
4169 else
4170 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
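/* Worked example with assumed target parameters (MOVE_MAX_PIECES == 8,
   MOVE_RATIO (speed_p) == 4): a 20-byte aggregate is below the
   8 * 4 == 32 byte threshold and costs (20 + 7) / 8 == 3 moves, while a
   48-byte aggregate is assumed to become a memcpy call and costs 4.  */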
4173 /* Returns cost of operation CODE, according to WEIGHTS */
4175 static int
4176 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4177 tree op1 ATTRIBUTE_UNUSED, tree op2)
4179 switch (code)
4181 /* These are "free" conversions, or their presumed cost
4182 is folded into other operations. */
4183 case RANGE_EXPR:
4184 CASE_CONVERT:
4185 case COMPLEX_EXPR:
4186 case PAREN_EXPR:
4187 case VIEW_CONVERT_EXPR:
4188 return 0;
4190 /* Assign cost of 1 to usual operations.
4191 ??? We may consider mapping RTL costs to this. */
4192 case COND_EXPR:
4193 case VEC_COND_EXPR:
4194 case VEC_PERM_EXPR:
4196 case PLUS_EXPR:
4197 case POINTER_PLUS_EXPR:
4198 case POINTER_DIFF_EXPR:
4199 case MINUS_EXPR:
4200 case MULT_EXPR:
4201 case MULT_HIGHPART_EXPR:
4203 case ADDR_SPACE_CONVERT_EXPR:
4204 case FIXED_CONVERT_EXPR:
4205 case FIX_TRUNC_EXPR:
4207 case NEGATE_EXPR:
4208 case FLOAT_EXPR:
4209 case MIN_EXPR:
4210 case MAX_EXPR:
4211 case ABS_EXPR:
4212 case ABSU_EXPR:
4214 case LSHIFT_EXPR:
4215 case RSHIFT_EXPR:
4216 case LROTATE_EXPR:
4217 case RROTATE_EXPR:
4219 case BIT_IOR_EXPR:
4220 case BIT_XOR_EXPR:
4221 case BIT_AND_EXPR:
4222 case BIT_NOT_EXPR:
4224 case TRUTH_ANDIF_EXPR:
4225 case TRUTH_ORIF_EXPR:
4226 case TRUTH_AND_EXPR:
4227 case TRUTH_OR_EXPR:
4228 case TRUTH_XOR_EXPR:
4229 case TRUTH_NOT_EXPR:
4231 case LT_EXPR:
4232 case LE_EXPR:
4233 case GT_EXPR:
4234 case GE_EXPR:
4235 case EQ_EXPR:
4236 case NE_EXPR:
4237 case ORDERED_EXPR:
4238 case UNORDERED_EXPR:
4240 case UNLT_EXPR:
4241 case UNLE_EXPR:
4242 case UNGT_EXPR:
4243 case UNGE_EXPR:
4244 case UNEQ_EXPR:
4245 case LTGT_EXPR:
4247 case CONJ_EXPR:
4249 case PREDECREMENT_EXPR:
4250 case PREINCREMENT_EXPR:
4251 case POSTDECREMENT_EXPR:
4252 case POSTINCREMENT_EXPR:
4254 case REALIGN_LOAD_EXPR:
4256 case WIDEN_PLUS_EXPR:
4257 case WIDEN_MINUS_EXPR:
4258 case WIDEN_SUM_EXPR:
4259 case WIDEN_MULT_EXPR:
4260 case DOT_PROD_EXPR:
4261 case SAD_EXPR:
4262 case WIDEN_MULT_PLUS_EXPR:
4263 case WIDEN_MULT_MINUS_EXPR:
4264 case WIDEN_LSHIFT_EXPR:
4266 case VEC_WIDEN_PLUS_HI_EXPR:
4267 case VEC_WIDEN_PLUS_LO_EXPR:
4268 case VEC_WIDEN_MINUS_HI_EXPR:
4269 case VEC_WIDEN_MINUS_LO_EXPR:
4270 case VEC_WIDEN_MULT_HI_EXPR:
4271 case VEC_WIDEN_MULT_LO_EXPR:
4272 case VEC_WIDEN_MULT_EVEN_EXPR:
4273 case VEC_WIDEN_MULT_ODD_EXPR:
4274 case VEC_UNPACK_HI_EXPR:
4275 case VEC_UNPACK_LO_EXPR:
4276 case VEC_UNPACK_FLOAT_HI_EXPR:
4277 case VEC_UNPACK_FLOAT_LO_EXPR:
4278 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4279 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4280 case VEC_PACK_TRUNC_EXPR:
4281 case VEC_PACK_SAT_EXPR:
4282 case VEC_PACK_FIX_TRUNC_EXPR:
4283 case VEC_PACK_FLOAT_EXPR:
4284 case VEC_WIDEN_LSHIFT_HI_EXPR:
4285 case VEC_WIDEN_LSHIFT_LO_EXPR:
4286 case VEC_DUPLICATE_EXPR:
4287 case VEC_SERIES_EXPR:
4289 return 1;
4291 /* A few special cases of expensive operations. This is useful
4292 to avoid inlining functions having too many of these. */
4293 case TRUNC_DIV_EXPR:
4294 case CEIL_DIV_EXPR:
4295 case FLOOR_DIV_EXPR:
4296 case ROUND_DIV_EXPR:
4297 case EXACT_DIV_EXPR:
4298 case TRUNC_MOD_EXPR:
4299 case CEIL_MOD_EXPR:
4300 case FLOOR_MOD_EXPR:
4301 case ROUND_MOD_EXPR:
4302 case RDIV_EXPR:
4303 if (TREE_CODE (op2) != INTEGER_CST)
4304 return weights->div_mod_cost;
4305 return 1;
4307 /* Bit-field insertion needs several shift and mask operations. */
4308 case BIT_INSERT_EXPR:
4309 return 3;
4311 default:
4312 /* We expect a copy assignment with no operator. */
4313 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4314 return 0;
4319 /* Estimate number of instructions that will be created by expanding
4320 the statements in the statement sequence STMTS.
4321 WEIGHTS contains weights attributed to various constructs. */
4324 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4326 int cost;
4327 gimple_stmt_iterator gsi;
4329 cost = 0;
4330 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4331 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4333 return cost;
4337 /* Estimate number of instructions that will be created by expanding STMT.
4338 WEIGHTS contains weights attributed to various constructs. */
4341 estimate_num_insns (gimple *stmt, eni_weights *weights)
4343 unsigned cost, i;
4344 enum gimple_code code = gimple_code (stmt);
4345 tree lhs;
4346 tree rhs;
4348 switch (code)
4350 case GIMPLE_ASSIGN:
4351 /* Try to estimate the cost of assignments. We have two cases to
4352 deal with:
4353 1) Simple assignments to registers;
4354 2) Stores to things that must live in memory. This includes
4355 "normal" stores to scalars, but also assignments of large
4356 structures, or constructors of big arrays;
4358 Let us look at these two cases, assuming we have "a = b + C":
4359 <GIMPLE_ASSIGN <var_decl "a">
4360 <plus_expr <var_decl "b"> <constant C>>
4361 If "a" is a GIMPLE register, the assignment to it is free on almost
4362 any target, because "a" usually ends up in a real register. Hence
4363 the only cost of this expression comes from the PLUS_EXPR, and we
4364 can ignore the GIMPLE_ASSIGN.
4365 If "a" is not a GIMPLE register, the assignment to "a" will most
4366 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4367 of moving something into "a", which we compute using the function
4368 estimate_move_cost. */
4369 if (gimple_clobber_p (stmt))
4370 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4372 lhs = gimple_assign_lhs (stmt);
4373 rhs = gimple_assign_rhs1 (stmt);
4375 cost = 0;
4377 /* Account for the cost of moving to / from memory. */
4378 if (gimple_store_p (stmt))
4379 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4380 if (gimple_assign_load_p (stmt))
4381 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4383 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4384 gimple_assign_rhs1 (stmt),
4385 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4386 == GIMPLE_BINARY_RHS
4387 ? gimple_assign_rhs2 (stmt) : NULL);
4388 break;
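/* Worked illustration (hypothetical operands): for 'a_1 = b_2 + c_3' on SSA
   registers the whole statement costs just the PLUS_EXPR, i.e. 1; if the
   LHS lives in memory, the store adds estimate_move_cost of its type; a
   plain aggregate copy is charged one move for the store, one for the load,
   and 0 for the (absent) operator.  */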
4390 case GIMPLE_COND:
4391 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4392 gimple_op (stmt, 0),
4393 gimple_op (stmt, 1));
4394 break;
4396 case GIMPLE_SWITCH:
4398 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4399 /* Take into account cost of the switch + guess 2 conditional jumps for
4400 each case label.
4402 TODO: once the switch expansion logic is sufficiently separated, we can
4403 do a better job of estimating the cost of the switch. */
4404 if (weights->time_based)
4405 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4406 else
4407 cost = gimple_switch_num_labels (switch_stmt) * 2;
4409 break;
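/* Worked illustration (assumed label count): a switch with 16 labels is
   estimated at floor_log2 (16) * 2 == 8 when weighing time (roughly a
   balanced decision tree) but at 16 * 2 == 32 when weighing size (two
   jumps guessed per case label).  */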
4411 case GIMPLE_CALL:
4413 tree decl;
4415 if (gimple_call_internal_p (stmt))
4416 return 0;
4417 else if ((decl = gimple_call_fndecl (stmt))
4418 && fndecl_built_in_p (decl))
4420 /* Do not special case builtins where we see the body.
4421 This just confuses the inliner. */
4422 struct cgraph_node *node;
4423 if (!(node = cgraph_node::get (decl))
4424 || node->definition)
4426 /* For builtins that are likely expanded to nothing or
4427 inlined do not account operand costs. */
4428 else if (is_simple_builtin (decl))
4429 return 0;
4430 else if (is_inexpensive_builtin (decl))
4431 return weights->target_builtin_call_cost;
4432 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4434 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4435 specialize the cheap expansion we do here.
4436 ??? This asks for a more general solution. */
4437 switch (DECL_FUNCTION_CODE (decl))
4439 case BUILT_IN_POW:
4440 case BUILT_IN_POWF:
4441 case BUILT_IN_POWL:
4442 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4443 && (real_equal
4444 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4445 &dconst2)))
4446 return estimate_operator_cost
4447 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4448 gimple_call_arg (stmt, 0));
4449 break;
4451 default:
4452 break;
4457 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4458 if (gimple_call_lhs (stmt))
4459 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4460 weights->time_based);
4461 for (i = 0; i < gimple_call_num_args (stmt); i++)
4463 tree arg = gimple_call_arg (stmt, i);
4464 cost += estimate_move_cost (TREE_TYPE (arg),
4465 weights->time_based);
4467 break;
4470 case GIMPLE_RETURN:
4471 return weights->return_cost;
4473 case GIMPLE_GOTO:
4474 case GIMPLE_LABEL:
4475 case GIMPLE_NOP:
4476 case GIMPLE_PHI:
4477 case GIMPLE_PREDICT:
4478 case GIMPLE_DEBUG:
4479 return 0;
4481 case GIMPLE_ASM:
4483 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4484 /* 1000 means infinity. This avoids overflows later
4485 with very long asm statements. */
4486 if (count > 1000)
4487 count = 1000;
4488 /* If this asm is asm inline, count anything as minimum size. */
4489 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4490 count = MIN (1, count);
4491 return MAX (1, count);
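/* Illustration: an asm template with five instructions counts as 5, one
   with several thousand is capped at 1000, and an 'asm inline' statement
   is counted as just 1 regardless of its length.  */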
4494 case GIMPLE_RESX:
4495 /* This is either going to be an external function call with one
4496 argument, or two register copy statements plus a goto. */
4497 return 2;
4499 case GIMPLE_EH_DISPATCH:
4500 /* ??? This is going to turn into a switch statement. Ideally
4501 we'd have a look at the eh region and estimate the number of
4502 edges involved. */
4503 return 10;
4505 case GIMPLE_BIND:
4506 return estimate_num_insns_seq (
4507 gimple_bind_body (as_a <gbind *> (stmt)),
4508 weights);
4510 case GIMPLE_EH_FILTER:
4511 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4513 case GIMPLE_CATCH:
4514 return estimate_num_insns_seq (gimple_catch_handler (
4515 as_a <gcatch *> (stmt)),
4516 weights);
4518 case GIMPLE_TRY:
4519 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4520 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4522 /* OMP directives are generally very expensive. */
4524 case GIMPLE_OMP_RETURN:
4525 case GIMPLE_OMP_SECTIONS_SWITCH:
4526 case GIMPLE_OMP_ATOMIC_STORE:
4527 case GIMPLE_OMP_CONTINUE:
4528 /* ...except these, which are cheap. */
4529 return 0;
4531 case GIMPLE_OMP_ATOMIC_LOAD:
4532 return weights->omp_cost;
4534 case GIMPLE_OMP_FOR:
4535 return (weights->omp_cost
4536 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4537 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4539 case GIMPLE_OMP_PARALLEL:
4540 case GIMPLE_OMP_TASK:
4541 case GIMPLE_OMP_CRITICAL:
4542 case GIMPLE_OMP_MASTER:
4543 case GIMPLE_OMP_TASKGROUP:
4544 case GIMPLE_OMP_ORDERED:
4545 case GIMPLE_OMP_SCAN:
4546 case GIMPLE_OMP_SECTION:
4547 case GIMPLE_OMP_SECTIONS:
4548 case GIMPLE_OMP_SINGLE:
4549 case GIMPLE_OMP_TARGET:
4550 case GIMPLE_OMP_TEAMS:
4551 return (weights->omp_cost
4552 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4554 case GIMPLE_TRANSACTION:
4555 return (weights->tm_cost
4556 + estimate_num_insns_seq (gimple_transaction_body (
4557 as_a <gtransaction *> (stmt)),
4558 weights));
4560 default:
4561 gcc_unreachable ();
4564 return cost;
4567 /* Estimate number of instructions that will be created by expanding
4568 function FNDECL. WEIGHTS contains weights attributed to various
4569 constructs. */
4572 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4574 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4575 gimple_stmt_iterator bsi;
4576 basic_block bb;
4577 int n = 0;
4579 gcc_assert (my_function && my_function->cfg);
4580 FOR_EACH_BB_FN (bb, my_function)
4582 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4583 n += estimate_num_insns (gsi_stmt (bsi), weights);
4586 return n;
4590 /* Initializes weights used by estimate_num_insns. */
4592 void
4593 init_inline_once (void)
4595 eni_size_weights.call_cost = 1;
4596 eni_size_weights.indirect_call_cost = 3;
4597 eni_size_weights.target_builtin_call_cost = 1;
4598 eni_size_weights.div_mod_cost = 1;
4599 eni_size_weights.omp_cost = 40;
4600 eni_size_weights.tm_cost = 10;
4601 eni_size_weights.time_based = false;
4602 eni_size_weights.return_cost = 1;
4604 /* Estimating time for call is difficult, since we have no idea what the
4605 called function does. In the current uses of eni_time_weights,
4606 underestimating the cost does less harm than overestimating it, so
4607 we choose a rather small value here. */
4608 eni_time_weights.call_cost = 10;
4609 eni_time_weights.indirect_call_cost = 15;
4610 eni_time_weights.target_builtin_call_cost = 1;
4611 eni_time_weights.div_mod_cost = 10;
4612 eni_time_weights.omp_cost = 40;
4613 eni_time_weights.tm_cost = 40;
4614 eni_time_weights.time_based = true;
4615 eni_time_weights.return_cost = 2;
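/* Illustrative consequence of these weights (assumed statement shape): a
   direct call 'x = f (y)' contributes 1 to the size estimate but 10 to the
   time estimate before per-operand move costs are added, so the size
   metric tolerates call sites that the time metric already treats as
   expensive.  */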
4619 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4621 static void
4622 prepend_lexical_block (tree current_block, tree new_block)
4624 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4625 BLOCK_SUBBLOCKS (current_block) = new_block;
4626 BLOCK_SUPERCONTEXT (new_block) = current_block;
4629 /* Add local variables from CALLEE to CALLER. */
4631 static inline void
4632 add_local_variables (struct function *callee, struct function *caller,
4633 copy_body_data *id)
4635 tree var;
4636 unsigned ix;
4638 FOR_EACH_LOCAL_DECL (callee, ix, var)
4639 if (!can_be_nonlocal (var, id))
4641 tree new_var = remap_decl (var, id);
4643 /* Remap debug-expressions. */
4644 if (VAR_P (new_var)
4645 && DECL_HAS_DEBUG_EXPR_P (var)
4646 && new_var != var)
4648 tree tem = DECL_DEBUG_EXPR (var);
4649 bool old_regimplify = id->regimplify;
4650 id->remapping_type_depth++;
4651 walk_tree (&tem, copy_tree_body_r, id, NULL);
4652 id->remapping_type_depth--;
4653 id->regimplify = old_regimplify;
4654 SET_DECL_DEBUG_EXPR (new_var, tem);
4655 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4657 add_local_decl (caller, new_var);
4661 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4662 have brought in or introduced any debug stmts for SRCVAR. */
4664 static inline void
4665 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4667 tree *remappedvarp = id->decl_map->get (srcvar);
4669 if (!remappedvarp)
4670 return;
4672 if (!VAR_P (*remappedvarp))
4673 return;
4675 if (*remappedvarp == id->retvar)
4676 return;
4678 tree tvar = target_for_debug_bind (*remappedvarp);
4679 if (!tvar)
4680 return;
4682 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4683 id->call_stmt);
4684 gimple_seq_add_stmt (bindings, stmt);
4687 /* For each inlined variable for which we may have debug bind stmts,
4688 add before GSI a final debug stmt resetting it, marking the end of
4689 its life, so that var-tracking knows it doesn't have to compute
4690 further locations for it. */
4692 static inline void
4693 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4695 tree var;
4696 unsigned ix;
4697 gimple_seq bindings = NULL;
4699 if (!gimple_in_ssa_p (id->src_cfun))
4700 return;
4702 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4703 return;
4705 for (var = DECL_ARGUMENTS (id->src_fn);
4706 var; var = DECL_CHAIN (var))
4707 reset_debug_binding (id, var, &bindings);
4709 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4710 reset_debug_binding (id, var, &bindings);
4712 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4715 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4717 static bool
4718 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4719 bitmap to_purge)
4721 tree use_retvar;
4722 tree fn;
4723 hash_map<tree, tree> *dst;
4724 hash_map<tree, tree> *st = NULL;
4725 tree return_slot;
4726 tree modify_dest;
4727 struct cgraph_edge *cg_edge;
4728 cgraph_inline_failed_t reason;
4729 basic_block return_block;
4730 edge e;
4731 gimple_stmt_iterator gsi, stmt_gsi;
4732 bool successfully_inlined = false;
4733 bool purge_dead_abnormal_edges;
4734 gcall *call_stmt;
4735 unsigned int prop_mask, src_properties;
4736 struct function *dst_cfun;
4737 tree simduid;
4738 use_operand_p use;
4739 gimple *simtenter_stmt = NULL;
4740 vec<tree> *simtvars_save;
4741 clone_info *info;
4743 /* The gimplifier uses input_location in too many places, such as
4744 internal_get_tmp_var (). */
4745 location_t saved_location = input_location;
4746 input_location = gimple_location (stmt);
4748 /* From here on, we're only interested in CALL_EXPRs. */
4749 call_stmt = dyn_cast <gcall *> (stmt);
4750 if (!call_stmt)
4751 goto egress;
4753 cg_edge = id->dst_node->get_edge (stmt);
4754 gcc_checking_assert (cg_edge);
4755 /* First, see if we can figure out what function is being called.
4756 If we cannot, then there is no hope of inlining the function. */
4757 if (cg_edge->indirect_unknown_callee)
4758 goto egress;
4759 fn = cg_edge->callee->decl;
4760 gcc_checking_assert (fn);
4762 /* If FN is a declaration of a function in a nested scope that was
4763 globally declared inline, we don't set its DECL_INITIAL.
4764 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4765 C++ front-end uses it for cdtors to refer to their internal
4766 declarations, that are not real functions. Fortunately those
4767 don't have trees to be saved, so we can tell by checking their
4768 gimple_body. */
4769 if (!DECL_INITIAL (fn)
4770 && DECL_ABSTRACT_ORIGIN (fn)
4771 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4772 fn = DECL_ABSTRACT_ORIGIN (fn);
4774 /* Don't try to inline functions that are not well-suited to inlining. */
4775 if (cg_edge->inline_failed)
4777 reason = cg_edge->inline_failed;
4778 /* If this call was originally indirect, we do not want to emit any
4779 inlining related warnings or sorry messages because there are no
4780 guarantees regarding those. */
4781 if (cg_edge->indirect_inlining_edge)
4782 goto egress;
4784 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4785 /* For extern inline functions that get redefined we have always
4786 silently ignored the always_inline flag. Better behavior would
4787 be to keep both bodies and use the extern inline body
4788 for inlining, but we can't do that because frontends overwrite
4789 the body. */
4790 && !cg_edge->callee->redefined_extern_inline
4791 /* During early inline pass, report only when optimization is
4792 not turned on. */
4793 && (symtab->global_info_ready
4794 || !optimize
4795 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4796 /* PR 20090218-1_0.c. Body can be provided by another module. */
4797 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4799 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4800 cgraph_inline_failed_string (reason));
4801 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4802 inform (gimple_location (stmt), "called from here");
4803 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4804 inform (DECL_SOURCE_LOCATION (cfun->decl),
4805 "called from this function");
4807 else if (opt_for_fn (fn, warn_inline)
4808 && DECL_DECLARED_INLINE_P (fn)
4809 && !DECL_NO_INLINE_WARNING_P (fn)
4810 && !DECL_IN_SYSTEM_HEADER (fn)
4811 && reason != CIF_UNSPECIFIED
4812 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4813 /* Do not warn about not inlined recursive calls. */
4814 && !cg_edge->recursive_p ()
4815 /* Avoid warnings during early inline pass. */
4816 && symtab->global_info_ready)
4818 auto_diagnostic_group d;
4819 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4820 fn, _(cgraph_inline_failed_string (reason))))
4822 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4823 inform (gimple_location (stmt), "called from here");
4824 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4825 inform (DECL_SOURCE_LOCATION (cfun->decl),
4826 "called from this function");
4829 goto egress;
4831 id->src_node = cg_edge->callee;
4833 /* If the callee is a thunk, all we need is to adjust the THIS pointer
4834 and redirect to the function being thunked. */
4835 if (id->src_node->thunk)
4837 cgraph_edge *edge;
4838 tree virtual_offset = NULL;
4839 profile_count count = cg_edge->count;
4840 tree op;
4841 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4842 thunk_info *info = thunk_info::get (id->src_node);
4844 cgraph_edge::remove (cg_edge);
4845 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4846 gimple_uid (stmt),
4847 profile_count::one (),
4848 profile_count::one (),
4849 true);
4850 edge->count = count;
4851 if (info->virtual_offset_p)
4852 virtual_offset = size_int (info->virtual_value);
4853 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4854 NULL);
4855 gsi_insert_before (&iter, gimple_build_assign (op,
4856 gimple_call_arg (stmt, 0)),
4857 GSI_NEW_STMT);
4858 gcc_assert (info->this_adjusting);
4859 op = thunk_adjust (&iter, op, 1, info->fixed_offset,
4860 virtual_offset, info->indirect_offset);
4862 gimple_call_set_arg (stmt, 0, op);
4863 gimple_call_set_fndecl (stmt, edge->callee->decl);
4864 update_stmt (stmt);
4865 id->src_node->remove ();
4866 successfully_inlined = expand_call_inline (bb, stmt, id, to_purge);
4867 maybe_remove_unused_call_args (cfun, stmt);
4868 /* This used to return true even though we do fail to inline in
4869 some cases. See PR98525. */
4870 goto egress;
4872 fn = cg_edge->callee->decl;
4873 cg_edge->callee->get_untransformed_body ();
4875 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4876 cg_edge->callee->verify ();
4878 /* We will be inlining this callee. */
4879 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4881 /* Update the caller's EH personality. */
4882 if (DECL_FUNCTION_PERSONALITY (fn))
4883 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4884 = DECL_FUNCTION_PERSONALITY (fn);
4886 /* Split the block before the GIMPLE_CALL. */
4887 stmt_gsi = gsi_for_stmt (stmt);
4888 gsi_prev (&stmt_gsi);
4889 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4890 bb = e->src;
4891 return_block = e->dest;
4892 remove_edge (e);
4894 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4895 been the source of abnormal edges. In this case, schedule
4896 the removal of dead abnormal edges. */
4897 gsi = gsi_start_bb (return_block);
4898 gsi_next (&gsi);
4899 purge_dead_abnormal_edges = gsi_end_p (gsi);
4901 stmt_gsi = gsi_start_bb (return_block);
4903 /* Build a block containing code to initialize the arguments, the
4904 actual inline expansion of the body, and a label for the return
4905 statements within the function to jump to. The type of the
4906 statement expression is the return type of the function call.
4907 ??? If the call does not have an associated block then we will
4908 remap all callee blocks to NULL, effectively dropping most of
4909 its debug information. This should only happen for calls to
4910 artificial decls inserted by the compiler itself. We need to
4911 either link the inlined blocks into the caller block tree or
4912 not refer to them in any way to not break GC for locations. */
4913 if (tree block = gimple_block (stmt))
4915 /* We do want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION,
4916 so that inlined_function_outer_scope_p returns true on this BLOCK. */
4917 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4918 if (loc == UNKNOWN_LOCATION)
4919 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4920 if (loc == UNKNOWN_LOCATION)
4921 loc = BUILTINS_LOCATION;
4922 id->block = make_node (BLOCK);
4923 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4924 BLOCK_SOURCE_LOCATION (id->block) = loc;
4925 prepend_lexical_block (block, id->block);
4928 /* Local declarations will be replaced by their equivalents in this map. */
4929 st = id->decl_map;
4930 id->decl_map = new hash_map<tree, tree>;
4931 dst = id->debug_map;
4932 id->debug_map = NULL;
4933 if (flag_stack_reuse != SR_NONE)
4934 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4936 /* Record the function we are about to inline. */
4937 id->src_fn = fn;
4938 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4939 id->reset_location = DECL_IGNORED_P (fn);
4940 id->call_stmt = call_stmt;
4942 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4943 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4944 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4945 simtvars_save = id->dst_simt_vars;
4946 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4947 && (simduid = bb->loop_father->simduid) != NULL_TREE
4948 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4949 && single_imm_use (simduid, &use, &simtenter_stmt)
4950 && is_gimple_call (simtenter_stmt)
4951 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4952 vec_alloc (id->dst_simt_vars, 0);
4953 else
4954 id->dst_simt_vars = NULL;
4956 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4957 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4959 /* If the src function contains an IFN_VA_ARG, then so will the dst
4960 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4961 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4962 src_properties = id->src_cfun->curr_properties & prop_mask;
4963 if (src_properties != prop_mask)
4964 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4965 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4966 id->dst_node->calls_declare_variant_alt
4967 |= id->src_node->calls_declare_variant_alt;
4969 gcc_assert (!id->src_cfun->after_inlining);
4971 id->entry_bb = bb;
4972 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4974 gimple_stmt_iterator si = gsi_last_bb (bb);
4975 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4976 NOT_TAKEN),
4977 GSI_NEW_STMT);
4979 initialize_inlined_parameters (id, stmt, fn, bb);
4980 if (debug_nonbind_markers_p && debug_inline_points && id->block
4981 && inlined_function_outer_scope_p (id->block))
4983 gimple_stmt_iterator si = gsi_last_bb (bb);
4984 gsi_insert_after (&si, gimple_build_debug_inline_entry
4985 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4986 GSI_NEW_STMT);
4989 if (DECL_INITIAL (fn))
4991 if (gimple_block (stmt))
4993 tree *var;
4995 prepend_lexical_block (id->block,
4996 remap_blocks (DECL_INITIAL (fn), id));
4997 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4998 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4999 == NULL_TREE));
5000 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block,
5001 otherwise in DWARF the DW_TAG_formal_parameter entries will not be children
5002 of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
5003 under it. The parameters can then be evaluated in the debugger,
5004 but don't show up in backtraces. */
5005 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
5006 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
5008 tree v = *var;
5009 *var = TREE_CHAIN (v);
5010 TREE_CHAIN (v) = BLOCK_VARS (id->block);
5011 BLOCK_VARS (id->block) = v;
5013 else
5014 var = &TREE_CHAIN (*var);
5016 else
5017 remap_blocks_to_null (DECL_INITIAL (fn), id);
5020 /* Return statements in the function body will be replaced by jumps
5021 to the RET_LABEL. */
5022 gcc_assert (DECL_INITIAL (fn));
5023 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
5025 /* Find the LHS to which the result of this call is assigned. */
5026 return_slot = NULL;
5027 if (gimple_call_lhs (stmt))
5029 modify_dest = gimple_call_lhs (stmt);
5031 /* The function which we are inlining might not return a value,
5032 in which case we should issue a warning that the function
5033 does not return a value. In that case the optimizers will
5034 see that the variable to which the value is assigned was not
5035 initialized. We do not want to issue a warning about that
5036 uninitialized variable. */
5037 if (DECL_P (modify_dest))
5038 TREE_NO_WARNING (modify_dest) = 1;
5040 if (gimple_call_return_slot_opt_p (call_stmt))
5042 return_slot = modify_dest;
5043 modify_dest = NULL;
5046 else
5047 modify_dest = NULL;
5049 /* If we are inlining a call to the C++ operator new, we don't want
5050 to use type based alias analysis on the return value. Otherwise
5051 we may get confused if the compiler sees that the inlined new
5052 function returns a pointer which was just deleted. See bug
5053 33407. */
5054 if (DECL_IS_OPERATOR_NEW_P (fn))
5056 return_slot = NULL;
5057 modify_dest = NULL;
5060 /* Declare the return variable for the function. */
5061 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
5063 /* Add local vars in this inlined callee to caller. */
5064 add_local_variables (id->src_cfun, cfun, id);
5066 info = clone_info::get (id->src_node);
5067 if (info && info->performed_splits)
5069 clone_info *dst_info = clone_info::get_create (id->dst_node);
5070 /* Any calls from the inlined function will be turned into calls from the
5071 function we inline into. We must preserve notes about how to split
5072 parameters, so that such calls can be redirected/updated. */
5073 unsigned len = vec_safe_length (info->performed_splits);
5074 for (unsigned i = 0; i < len; i++)
5076 ipa_param_performed_split ps
5077 = (*info->performed_splits)[i];
5078 ps.dummy_decl = remap_decl (ps.dummy_decl, id);
5079 vec_safe_push (dst_info->performed_splits, ps);
5082 if (flag_checking)
5084 len = vec_safe_length (dst_info->performed_splits);
5085 for (unsigned i = 0; i < len; i++)
5087 ipa_param_performed_split *ps1
5088 = &(*dst_info->performed_splits)[i];
5089 for (unsigned j = i + 1; j < len; j++)
5091 ipa_param_performed_split *ps2
5092 = &(*dst_info->performed_splits)[j];
5093 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
5094 || ps1->unit_offset != ps2->unit_offset);
5100 if (dump_enabled_p ())
5102 char buf[128];
5103 snprintf (buf, sizeof(buf), "%4.2f",
5104 cg_edge->sreal_frequency ().to_double ());
5105 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5106 call_stmt,
5107 "Inlining %C to %C with frequency %s\n",
5108 id->src_node, id->dst_node, buf);
5109 if (dump_file && (dump_flags & TDF_DETAILS))
5111 id->src_node->dump (dump_file);
5112 id->dst_node->dump (dump_file);
5116 /* This is it. Duplicate the callee body. Assume callee is
5117 pre-gimplified. Note that we must not alter the caller
5118 function in any way before this point, as this CALL_EXPR may be
5119 a self-referential call; if we're calling ourselves, we need to
5120 duplicate our body before altering anything. */
5121 copy_body (id, bb, return_block, NULL);
5123 reset_debug_bindings (id, stmt_gsi);
5125 if (flag_stack_reuse != SR_NONE)
5126 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5127 if (!TREE_THIS_VOLATILE (p))
5129 /* The value associated with P is a local temporary only if
5130 there is no value associated with P in the debug map. */
5131 tree *varp = id->decl_map->get (p);
5132 if (varp
5133 && VAR_P (*varp)
5134 && !is_gimple_reg (*varp)
5135 && !(id->debug_map && id->debug_map->get (p)))
5137 tree clobber = build_clobber (TREE_TYPE (*varp));
5138 gimple *clobber_stmt;
5139 clobber_stmt = gimple_build_assign (*varp, clobber);
5140 gimple_set_location (clobber_stmt, gimple_location (stmt));
5141 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5145 /* Reset the escaped solution. */
5146 if (cfun->gimple_df)
5147 pt_solution_reset (&cfun->gimple_df->escaped);
5149 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
5150 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5152 size_t nargs = gimple_call_num_args (simtenter_stmt);
5153 vec<tree> *vars = id->dst_simt_vars;
5154 auto_vec<tree> newargs (nargs + vars->length ());
5155 for (size_t i = 0; i < nargs; i++)
5156 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5157 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5159 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5160 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5162 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5163 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5164 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5165 gsi_replace (&gsi, g, false);
5167 vec_free (id->dst_simt_vars);
5168 id->dst_simt_vars = simtvars_save;
5170 /* Clean up. */
5171 if (id->debug_map)
5173 delete id->debug_map;
5174 id->debug_map = dst;
5176 delete id->decl_map;
5177 id->decl_map = st;
5179 /* Unlink the call's virtual operands before replacing it. */
5180 unlink_stmt_vdef (stmt);
5181 if (gimple_vdef (stmt)
5182 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5183 release_ssa_name (gimple_vdef (stmt));
5185 /* If the inlined function returns a result that we care about,
5186 substitute the GIMPLE_CALL with an assignment of the return
5187 variable to the LHS of the call. That is, if STMT was
5188 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
5189 if (use_retvar && gimple_call_lhs (stmt))
5191 gimple *old_stmt = stmt;
5192 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5193 gimple_set_location (stmt, gimple_location (old_stmt));
5194 gsi_replace (&stmt_gsi, stmt, false);
5195 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5196 /* Append a clobber for id->retvar if easily possible. */
5197 if (flag_stack_reuse != SR_NONE
5198 && id->retvar
5199 && VAR_P (id->retvar)
5200 && id->retvar != return_slot
5201 && id->retvar != modify_dest
5202 && !TREE_THIS_VOLATILE (id->retvar)
5203 && !is_gimple_reg (id->retvar)
5204 && !stmt_ends_bb_p (stmt))
5206 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5207 gimple *clobber_stmt;
5208 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5209 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5210 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5213 else
5215 /* Handle the case of inlining a function with no return
5216 statement, which causes the return value to become undefined. */
5217 if (gimple_call_lhs (stmt)
5218 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5220 tree name = gimple_call_lhs (stmt);
5221 tree var = SSA_NAME_VAR (name);
5222 tree def = var ? ssa_default_def (cfun, var) : NULL;
5224 if (def)
5226 /* If the variable is used undefined, make this name
5227 undefined via a move. */
5228 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5229 gsi_replace (&stmt_gsi, stmt, true);
5231 else
5233 if (!var)
5235 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5236 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5238 /* Otherwise make this variable undefined. */
5239 gsi_remove (&stmt_gsi, true);
5240 set_ssa_default_def (cfun, var, name);
5241 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5244 /* Replace with a clobber for id->retvar. */
5245 else if (flag_stack_reuse != SR_NONE
5246 && id->retvar
5247 && VAR_P (id->retvar)
5248 && id->retvar != return_slot
5249 && id->retvar != modify_dest
5250 && !TREE_THIS_VOLATILE (id->retvar)
5251 && !is_gimple_reg (id->retvar))
5253 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5254 gimple *clobber_stmt;
5255 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5256 gimple_set_location (clobber_stmt, gimple_location (stmt));
5257 gsi_replace (&stmt_gsi, clobber_stmt, false);
5258 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5260 else
5261 gsi_remove (&stmt_gsi, true);
5264 if (purge_dead_abnormal_edges)
5265 bitmap_set_bit (to_purge, return_block->index);
5267 /* If the value of the new expression is ignored, that's OK. We
5268 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5269 the equivalent inlined version either. */
5270 if (is_gimple_assign (stmt))
5272 gcc_assert (gimple_assign_single_p (stmt)
5273 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5274 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5277 id->add_clobbers_to_eh_landing_pads = 0;
5279 /* Output the inlining info for this abstract function, since it has been
5280 inlined. If we don't do this now, we can lose the information about the
5281 variables in the function when the blocks get blown away as soon as we
5282 remove the cgraph node. */
5283 if (gimple_block (stmt))
5284 (*debug_hooks->outlining_inline_function) (fn);
5286 /* Update callgraph if needed. */
5287 cg_edge->callee->remove ();
5289 id->block = NULL_TREE;
5290 id->retvar = NULL_TREE;
5291 successfully_inlined = true;
5293 egress:
5294 input_location = saved_location;
5295 return successfully_inlined;
5298 /* Expand call statements reachable from STMT_P.
5299 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5300 in a MODIFY_EXPR. */
5302 static bool
5303 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5304 bitmap to_purge)
5306 gimple_stmt_iterator gsi;
5307 bool inlined = false;
5309 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5311 gimple *stmt = gsi_stmt (gsi);
5312 gsi_prev (&gsi);
5314 if (is_gimple_call (stmt)
5315 && !gimple_call_internal_p (stmt))
5316 inlined |= expand_call_inline (bb, stmt, id, to_purge);
5319 return inlined;
5323 /* Walk all basic blocks created after FIRST and try to fold every statement
5324 in the STATEMENTS pointer set. */
5326 static void
5327 fold_marked_statements (int first, hash_set<gimple *> *statements)
5329 auto_bitmap to_purge;
5331 auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
5332 auto_sbitmap visited (last_basic_block_for_fn (cfun));
5333 bitmap_clear (visited);
5335 stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5336 while (!stack.is_empty ())
5338 /* Look at the edge on the top of the stack. */
5339 edge e = stack.pop ();
5340 basic_block dest = e->dest;
5342 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
5343 || bitmap_bit_p (visited, dest->index))
5344 continue;
5346 bitmap_set_bit (visited, dest->index);
5348 if (dest->index >= first)
5349 for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
5350 !gsi_end_p (gsi); gsi_next (&gsi))
5352 if (!statements->contains (gsi_stmt (gsi)))
5353 continue;
5355 gimple *old_stmt = gsi_stmt (gsi);
5356 tree old_decl = (is_gimple_call (old_stmt)
5357 ? gimple_call_fndecl (old_stmt) : 0);
5358 if (old_decl && fndecl_built_in_p (old_decl))
5360 /* Folding builtins can create multiple instructions,
5361 we need to look at all of them. */
5362 gimple_stmt_iterator i2 = gsi;
5363 gsi_prev (&i2);
5364 if (fold_stmt (&gsi))
5366 gimple *new_stmt;
5367 /* If a builtin at the end of a bb folded into nothing,
5368 the following loop won't work. */
5369 if (gsi_end_p (gsi))
5371 cgraph_update_edges_for_call_stmt (old_stmt,
5372 old_decl, NULL);
5373 break;
5375 if (gsi_end_p (i2))
5376 i2 = gsi_start_bb (dest);
5377 else
5378 gsi_next (&i2);
5379 while (1)
5381 new_stmt = gsi_stmt (i2);
5382 update_stmt (new_stmt);
5383 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5384 new_stmt);
5386 if (new_stmt == gsi_stmt (gsi))
5388 /* It is okay to check only the very last
5389 of these statements. If it is a throwing
5390 statement nothing will change. If it isn't,
5391 this can remove EH edges. The only way that
5392 could be wrong is if some intermediate stmts
5393 throw, but not the last one. That would mean
5394 we'd have to split the block, which we can't
5395 do here and we'd lose anyway. And as builtins
5396 probably never throw, this all
5397 is moot anyway. */
5398 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5399 new_stmt))
5400 bitmap_set_bit (to_purge, dest->index);
5401 break;
5403 gsi_next (&i2);
5407 else if (fold_stmt (&gsi))
5409 /* Re-read the statement from GSI as fold_stmt() may
5410 have changed it. */
5411 gimple *new_stmt = gsi_stmt (gsi);
5412 update_stmt (new_stmt);
5414 if (is_gimple_call (old_stmt)
5415 || is_gimple_call (new_stmt))
5416 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5417 new_stmt);
5419 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5420 bitmap_set_bit (to_purge, dest->index);
5424 if (EDGE_COUNT (dest->succs) > 0)
5426 /* Avoid warnings emitted from folding statements that
5427 became unreachable because of inlined function parameter
5428 propagation. */
5429 e = find_taken_edge (dest, NULL_TREE);
5430 if (e)
5431 stack.quick_push (e);
5432 else
5434 edge_iterator ei;
5435 FOR_EACH_EDGE (e, ei, dest->succs)
5436 stack.safe_push (e);
5441 gimple_purge_all_dead_eh_edges (to_purge);
5444 /* Expand calls to inline functions in the body of FN. */
5446 unsigned int
5447 optimize_inline_calls (tree fn)
5449 copy_body_data id;
5450 basic_block bb;
5451 int last = n_basic_blocks_for_fn (cfun);
5452 bool inlined_p = false;
5454 /* Clear out ID. */
5455 memset (&id, 0, sizeof (id));
5457 id.src_node = id.dst_node = cgraph_node::get (fn);
5458 gcc_assert (id.dst_node->definition);
5459 id.dst_fn = fn;
5460 /* Or any functions that aren't finished yet. */
5461 if (current_function_decl)
5462 id.dst_fn = current_function_decl;
5464 id.copy_decl = copy_decl_maybe_to_var;
5465 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5466 id.transform_new_cfg = false;
5467 id.transform_return_to_modify = true;
5468 id.transform_parameter = true;
5469 id.transform_lang_insert_block = NULL;
5470 id.statements_to_fold = new hash_set<gimple *>;
5472 push_gimplify_context ();
5474 /* We make no attempts to keep dominance info up-to-date. */
5475 free_dominance_info (CDI_DOMINATORS);
5476 free_dominance_info (CDI_POST_DOMINATORS);
5478 /* Register specific gimple functions. */
5479 gimple_register_cfg_hooks ();
5481 /* Reach the trees by walking over the CFG, and note the
5482 enclosing basic-blocks in the call edges. */
5483 /* We walk the blocks going forward, because inlined function bodies
5484 will split id->current_basic_block, and the new blocks will
5485 follow it; we'll trudge through them, processing their CALL_EXPRs
5486 along the way. */
5487 auto_bitmap to_purge;
5488 FOR_EACH_BB_FN (bb, cfun)
5489 inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5491 pop_gimplify_context (NULL);
5493 if (flag_checking)
5495 struct cgraph_edge *e;
5497 id.dst_node->verify ();
5499 /* Double check that we inlined everything we are supposed to inline. */
5500 for (e = id.dst_node->callees; e; e = e->next_callee)
5501 gcc_assert (e->inline_failed);
5504 /* If we didn't inline into the function there is nothing to do. */
5505 if (!inlined_p)
5507 delete id.statements_to_fold;
5508 return 0;
5511 /* Fold queued statements. */
5512 update_max_bb_count ();
5513 fold_marked_statements (last, id.statements_to_fold);
5514 delete id.statements_to_fold;
5516 /* Finally purge EH and abnormal edges from the call stmts we inlined.
5517 We need to do this after fold_marked_statements since that may walk
5518 the SSA use-def chain. */
5519 unsigned i;
5520 bitmap_iterator bi;
5521 EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5523 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5524 if (bb)
5526 gimple_purge_dead_eh_edges (bb);
5527 gimple_purge_dead_abnormal_call_edges (bb);
5531 gcc_assert (!id.debug_stmts.exists ());
5533 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5534 number_blocks (fn);
5536 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5537 id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();
5539 if (flag_checking)
5540 id.dst_node->verify ();
5542 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5543 not possible yet - the IPA passes might make various functions not
5544 throw and they don't care to proactively update local EH info. This is
5545 done later in the fixup_cfg pass, which also executes the verification. */
5546 return (TODO_update_ssa
5547 | TODO_cleanup_cfg
5548 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5549 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5550 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5551 ? TODO_rebuild_frequencies : 0));
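/* Usage sketch (hypothetical caller, for illustration only): a pass that
   runs the inliner on the current function is expected to hand the returned
   TODO flags back to the pass manager, e.g.

     unsigned int
     execute_example_inline (void)
     {
       return optimize_inline_calls (current_function_decl);
     }

   so that SSA form and the CFG get cleaned up after inlining.  */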
5554 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5556 tree
5557 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5559 enum tree_code code = TREE_CODE (*tp);
5560 enum tree_code_class cl = TREE_CODE_CLASS (code);
5562 /* We make copies of most nodes. */
5563 if (IS_EXPR_CODE_CLASS (cl)
5564 || code == TREE_LIST
5565 || code == TREE_VEC
5566 || code == TYPE_DECL
5567 || code == OMP_CLAUSE)
5569 /* Because the chain gets clobbered when we make a copy, we save it
5570 here. */
5571 tree chain = NULL_TREE, new_tree;
5573 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5574 chain = TREE_CHAIN (*tp);
5576 /* Copy the node. */
5577 new_tree = copy_node (*tp);
5579 *tp = new_tree;
5581 /* Now, restore the chain, if appropriate. That will cause
5582 walk_tree to walk into the chain as well. */
5583 if (code == PARM_DECL
5584 || code == TREE_LIST
5585 || code == OMP_CLAUSE)
5586 TREE_CHAIN (*tp) = chain;
5588 /* For now, we don't update BLOCKs when we make copies. So, we
5589 have to nullify all BIND_EXPRs. */
5590 if (TREE_CODE (*tp) == BIND_EXPR)
5591 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5593 else if (code == CONSTRUCTOR)
5595 /* CONSTRUCTOR nodes need special handling because
5596 we need to duplicate the vector of elements. */
5597 tree new_tree;
5599 new_tree = copy_node (*tp);
5600 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5601 *tp = new_tree;
5603 else if (code == STATEMENT_LIST)
5604 /* We used to just abort on STATEMENT_LIST, but we can run into them
5605 with statement-expressions (c++/40975). */
5606 copy_statement_list (tp);
5607 else if (TREE_CODE_CLASS (code) == tcc_type)
5608 *walk_subtrees = 0;
5609 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5610 *walk_subtrees = 0;
5611 else if (TREE_CODE_CLASS (code) == tcc_constant)
5612 *walk_subtrees = 0;
5613 return NULL_TREE;
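/* Usage sketch (for illustration): copy_tree_r is meant to be handed to
   walk_tree to unshare a GENERIC expression in place, e.g.

     walk_tree (&expr, copy_tree_r, NULL, NULL);

   after which EXPR and its sub-expressions are fresh nodes, while types,
   declarations and constants remain shared with the original tree.  */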
5616 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5617 information indicating to what new SAVE_EXPR this one should be mapped,
5618 use that one. Otherwise, create a new node and enter it in ST. */
5621 static void
5622 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5624 tree *n;
5625 tree t;
5627 /* See if we already encountered this SAVE_EXPR. */
5628 n = st->get (*tp);
5630 /* If we didn't already remap this SAVE_EXPR, do so now. */
5631 if (!n)
5633 t = copy_node (*tp);
5635 /* Remember this SAVE_EXPR. */
5636 st->put (*tp, t);
5637 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5638 st->put (t, t);
5640 else
5642 /* We've already walked into this SAVE_EXPR; don't do it again. */
5643 *walk_subtrees = 0;
5644 t = *n;
5647 /* Replace this SAVE_EXPR with the copy. */
5648 *tp = t;
5651 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5652 label, copies the declaration and enters it in the decl map of WI->INFO
5653 (which is really a 'copy_body_data *'). */
5655 static tree
5656 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5657 bool *handled_ops_p ATTRIBUTE_UNUSED,
5658 struct walk_stmt_info *wi)
5660 copy_body_data *id = (copy_body_data *) wi->info;
5661 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5663 if (stmt)
5665 tree decl = gimple_label_label (stmt);
5667 /* Copy the decl and remember the copy. */
5668 insert_decl_map (id, decl, id->copy_decl (decl, id));
5671 return NULL_TREE;
5674 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5675 struct walk_stmt_info *wi);
5677 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5678 Using the decl map of the copy_body_data passed in via WI->INFO,
5679 remaps all local declarations to appropriate replacements in gimple
5680 operands. */
5682 static tree
5683 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5685 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5686 copy_body_data *id = (copy_body_data *) wi->info;
5687 hash_map<tree, tree> *st = id->decl_map;
5688 tree *n;
5689 tree expr = *tp;
5691 /* For recursive invocations this is no longer the LHS itself. */
5692 bool is_lhs = wi->is_lhs;
5693 wi->is_lhs = false;
5695 if (TREE_CODE (expr) == SSA_NAME)
5697 *tp = remap_ssa_name (*tp, id);
5698 *walk_subtrees = 0;
5699 if (is_lhs)
5700 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5702 /* Only a local declaration (variable or label). */
5703 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5704 || TREE_CODE (expr) == LABEL_DECL)
5706 /* Lookup the declaration. */
5707 n = st->get (expr);
5709 /* If it's there, remap it. */
5710 if (n)
5711 *tp = *n;
5712 *walk_subtrees = 0;
5714 else if (TREE_CODE (expr) == STATEMENT_LIST
5715 || TREE_CODE (expr) == BIND_EXPR
5716 || TREE_CODE (expr) == SAVE_EXPR)
5717 gcc_unreachable ();
5718 else if (TREE_CODE (expr) == TARGET_EXPR)
5720 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5721 It's OK for this to happen if it was part of a subtree that
5722 isn't immediately expanded, such as operand 2 of another
5723 TARGET_EXPR. */
5724 if (!TREE_OPERAND (expr, 1))
5726 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5727 TREE_OPERAND (expr, 3) = NULL_TREE;
5730 else if (TREE_CODE (expr) == OMP_CLAUSE)
5732 /* Before the omplower pass completes, some OMP clauses can contain
5733 sequences that are neither copied by gimple_seq_copy nor walked by
5734 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5735 in those situations, we have to copy and process them explicitly. */
5737 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5739 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5740 seq = duplicate_remap_omp_clause_seq (seq, wi);
5741 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5743 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5745 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5746 seq = duplicate_remap_omp_clause_seq (seq, wi);
5747 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5749 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5751 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5752 seq = duplicate_remap_omp_clause_seq (seq, wi);
5753 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5754 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5755 seq = duplicate_remap_omp_clause_seq (seq, wi);
5756 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5760 /* Keep iterating. */
5761 return NULL_TREE;
5765 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5766 Using the decl map of the copy_body_data passed in via WI->INFO,
5767 remaps all local declarations to appropriate replacements in gimple
5768 statements. */
5770 static tree
5771 replace_locals_stmt (gimple_stmt_iterator *gsip,
5772 bool *handled_ops_p ATTRIBUTE_UNUSED,
5773 struct walk_stmt_info *wi)
5775 copy_body_data *id = (copy_body_data *) wi->info;
5776 gimple *gs = gsi_stmt (*gsip);
5778 if (gbind *stmt = dyn_cast <gbind *> (gs))
5780 tree block = gimple_bind_block (stmt);
5782 if (block)
5784 remap_block (&block, id);
5785 gimple_bind_set_block (stmt, block);
5788 /* This will remap a lot of the same decls again, but this should be
5789 harmless. */
5790 if (gimple_bind_vars (stmt))
5792 tree old_var, decls = gimple_bind_vars (stmt);
5794 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5795 if (!can_be_nonlocal (old_var, id)
5796 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5797 remap_decl (old_var, id);
5799 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5800 id->prevent_decl_creation_for_types = true;
5801 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5802 id->prevent_decl_creation_for_types = false;
5806 /* Keep iterating. */
5807 return NULL_TREE;
5810 /* Create a copy of SEQ and remap all decls in it. */
5812 static gimple_seq
5813 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5815 if (!seq)
5816 return NULL;
5818 /* If there are any labels in OMP sequences, they can only be referred to
5819 within the sequence itself, so we can both mark and remap them here. */
5820 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5821 gimple_seq copy = gimple_seq_copy (seq);
5822 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5823 return copy;
5826 /* Copies everything in SEQ and replaces variables and labels local to
5827 current_function_decl. */
5829 gimple_seq
5830 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5832 copy_body_data id;
5833 struct walk_stmt_info wi;
5834 gimple_seq copy;
5836 /* There's nothing to do for an empty sequence. */
5837 if (seq == NULL)
5838 return seq;
5840 /* Set up ID. */
5841 memset (&id, 0, sizeof (id));
5842 id.src_fn = current_function_decl;
5843 id.dst_fn = current_function_decl;
5844 id.src_cfun = cfun;
5845 id.decl_map = new hash_map<tree, tree>;
5846 id.debug_map = NULL;
5848 id.copy_decl = copy_decl_no_change;
5849 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5850 id.transform_new_cfg = false;
5851 id.transform_return_to_modify = false;
5852 id.transform_parameter = false;
5853 id.transform_lang_insert_block = NULL;
5855 /* Walk the tree once to find local labels. */
5856 memset (&wi, 0, sizeof (wi));
5857 hash_set<tree> visited;
5858 wi.info = &id;
5859 wi.pset = &visited;
5860 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5862 copy = gimple_seq_copy (seq);
5864 /* Walk the copy, remapping decls. */
5865 memset (&wi, 0, sizeof (wi));
5866 wi.info = &id;
5867 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5869 /* Clean up. */
5870 delete id.decl_map;
5871 if (id.debug_map)
5872 delete id.debug_map;
5873 if (id.dependence_map)
5875 delete id.dependence_map;
5876 id.dependence_map = NULL;
5879 return copy;
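/* Usage sketch (for illustration): a caller that needs a private copy of a
   statement sequence from the current function, with all automatic
   variables and local labels replaced by fresh declarations, can simply do

     gimple_seq copy = copy_gimple_seq_and_replace_locals (seq);

   and keep using SEQ unchanged alongside COPY.  */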
5883 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5885 static tree
5886 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5888 if (*tp == data)
5889 return (tree) data;
5890 else
5891 return NULL;
5894 DEBUG_FUNCTION bool
5895 debug_find_tree (tree top, tree search)
5897 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
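/* Usage sketch (for illustration): debug_find_tree is meant to be invoked
   by hand from the debugger, e.g.

     (gdb) call debug_find_tree (top, search)

   with TOP and SEARCH being tree values visible in the current frame; it
   returns true iff SEARCH occurs somewhere below TOP.  */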
5901 /* Declare the variables created by the inliner. Add all the variables in
5902 VARS to BLOCK. */
5904 static void
5905 declare_inline_vars (tree block, tree vars)
5907 tree t;
5908 for (t = vars; t; t = DECL_CHAIN (t))
5910 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5911 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5912 add_local_decl (cfun, t);
5915 if (block)
5916 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5919 /* Finish up copying DECL into COPY. The DECL originally was in ID->src_fn,
5920 but the copy will end up in ID->dst_fn; adjust debug info, flags and the
5921 DECL_CONTEXT accordingly. */
5923 tree
5924 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5926 /* Don't generate debug information for the copy if we wouldn't have
5927 generated it for the original either. */
5928 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5929 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5931 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5932 declaration inspired this copy. */
5933 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5935 /* The new variable/label has no RTL, yet. */
5936 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5937 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5938 SET_DECL_RTL (copy, 0);
5939 /* For vector typed decls make sure to update DECL_MODE according
5940 to the new function context. */
5941 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5942 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5944 /* These args would always appear unused, if not for this. */
5945 TREE_USED (copy) = 1;
5947 /* Set the context for the new declaration. */
5948 if (!DECL_CONTEXT (decl))
5949 /* Globals stay global. */
5951 else if (DECL_CONTEXT (decl) != id->src_fn)
5952 /* Things that weren't in the scope of the function we're inlining
5953 from aren't in the scope we're inlining to, either. */
5955 else if (TREE_STATIC (decl))
5956 /* Function-scoped static variables should stay in the original
5957 function. */
5959 else
5961 /* Ordinary automatic local variables are now in the scope of the
5962 new function. */
5963 DECL_CONTEXT (copy) = id->dst_fn;
5964 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5966 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5967 DECL_ATTRIBUTES (copy)
5968 = tree_cons (get_identifier ("omp simt private"), NULL,
5969 DECL_ATTRIBUTES (copy));
5970 id->dst_simt_vars->safe_push (copy);
5974 return copy;
5977 /* Create a new VAR_DECL that is identical in all respects to DECL, except
5978 that DECL can be either a PARM_DECL or a RESULT_DECL. The original
5979 DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
5981 tree
5982 copy_decl_to_var (tree decl, copy_body_data *id)
5984 tree copy, type;
5986 gcc_assert (TREE_CODE (decl) == PARM_DECL
5987 || TREE_CODE (decl) == RESULT_DECL);
5989 type = TREE_TYPE (decl);
5991 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5992 VAR_DECL, DECL_NAME (decl), type);
5993 if (DECL_PT_UID_SET_P (decl))
5994 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5995 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5996 TREE_READONLY (copy) = TREE_READONLY (decl);
5997 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5998 DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
5999 DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
6001 return copy_decl_for_dup_finish (id, decl, copy);
6004 /* Like copy_decl_to_var, but create a return slot object instead of a
6005 pointer variable for return by invisible reference. */
6007 static tree
6008 copy_result_decl_to_var (tree decl, copy_body_data *id)
6010 tree copy, type;
6012 gcc_assert (TREE_CODE (decl) == PARM_DECL
6013 || TREE_CODE (decl) == RESULT_DECL);
6015 type = TREE_TYPE (decl);
6016 if (DECL_BY_REFERENCE (decl))
6017 type = TREE_TYPE (type);
6019 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
6020 VAR_DECL, DECL_NAME (decl), type);
6021 if (DECL_PT_UID_SET_P (decl))
6022 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
6023 TREE_READONLY (copy) = TREE_READONLY (decl);
6024 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
6025 if (!DECL_BY_REFERENCE (decl))
6027 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
6028 DECL_NOT_GIMPLE_REG_P (copy)
6029 = (DECL_NOT_GIMPLE_REG_P (decl)
6030 /* RESULT_DECLs are treated specially by needs_to_live_in_memory;
6031 mirror that to the created VAR_DECL. */
6032 || (TREE_CODE (decl) == RESULT_DECL
6033 && aggregate_value_p (decl, id->src_fn)));
6036 return copy_decl_for_dup_finish (id, decl, copy);
6039 tree
6040 copy_decl_no_change (tree decl, copy_body_data *id)
6042 tree copy;
6044 copy = copy_node (decl);
6046 /* The COPY is not abstract; it will be generated in DST_FN. */
6047 DECL_ABSTRACT_P (copy) = false;
6048 lang_hooks.dup_lang_specific_decl (copy);
6050 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
6051 been taken; it's for internal bookkeeping in expand_goto_internal. */
6052 if (TREE_CODE (copy) == LABEL_DECL)
6054 TREE_ADDRESSABLE (copy) = 0;
6055 LABEL_DECL_UID (copy) = -1;
6058 return copy_decl_for_dup_finish (id, decl, copy);
6061 static tree
6062 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
6064 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
6065 return copy_decl_to_var (decl, id);
6066 else
6067 return copy_decl_no_change (decl, id);
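/* Note (for illustration): the copy_decl_* routines above are not called
   directly by most code; they are installed as the copy_decl callback of a
   copy_body_data so the remapping machinery can create the right kind of
   copy.  For instance, optimize_inline_calls uses

     id.copy_decl = copy_decl_maybe_to_var;

   while tree_function_versioning and copy_gimple_seq_and_replace_locals use

     id.copy_decl = copy_decl_no_change;  */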
6070 /* Return a copy of the function's argument tree without any modifications. */
6072 static tree
6073 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
6075 tree arg, *parg;
6076 tree new_parm = NULL;
6078 parg = &new_parm;
6079 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
6081 tree new_tree = remap_decl (arg, id);
6082 if (TREE_CODE (new_tree) != PARM_DECL)
6083 new_tree = id->copy_decl (arg, id);
6084 lang_hooks.dup_lang_specific_decl (new_tree);
6085 *parg = new_tree;
6086 parg = &DECL_CHAIN (new_tree);
6088 return new_parm;
6091 /* Return a copy of the function's static chain. */
6092 static tree
6093 copy_static_chain (tree static_chain, copy_body_data * id)
6095 tree *chain_copy, *pvar;
6097 chain_copy = &static_chain;
6098 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
6100 tree new_tree = remap_decl (*pvar, id);
6101 lang_hooks.dup_lang_specific_decl (new_tree);
6102 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
6103 *pvar = new_tree;
6105 return static_chain;
6108 /* Return true if the function is allowed to be versioned.
6109 This is a guard for the versioning functionality. */
6111 bool
6112 tree_versionable_function_p (tree fndecl)
6114 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
6115 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
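/* Usage sketch (hypothetical, for illustration): IPA code is expected to
   test this predicate before attempting to clone, e.g.

     if (tree_versionable_function_p (node->decl))
       make_a_clone_of (node);

   where make_a_clone_of stands for whatever cloning mechanism the caller
   uses; "noclone" functions and bodies with a copy_forbidden reason are
   skipped.  */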
6118 /* Update clone info after duplication. */
6120 static void
6121 update_clone_info (copy_body_data * id)
6123 clone_info *dst_info = clone_info::get (id->dst_node);
6124 vec<ipa_param_performed_split, va_gc> *cur_performed_splits
6125 = dst_info ? dst_info->performed_splits : NULL;
6126 if (cur_performed_splits)
6128 unsigned len = cur_performed_splits->length ();
6129 for (unsigned i = 0; i < len; i++)
6131 ipa_param_performed_split *ps = &(*cur_performed_splits)[i];
6132 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6136 struct cgraph_node *node;
6137 if (!id->dst_node->clones)
6138 return;
6139 for (node = id->dst_node->clones; node != id->dst_node;)
6141 /* First update replace maps to match the new body. */
6142 clone_info *info = clone_info::get (node);
6143 if (info && info->tree_map)
6145 unsigned int i;
6146 for (i = 0; i < vec_safe_length (info->tree_map); i++)
6148 struct ipa_replace_map *replace_info;
6149 replace_info = (*info->tree_map)[i];
6150 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6153 if (info && info->performed_splits)
6155 unsigned len = vec_safe_length (info->performed_splits);
6156 for (unsigned i = 0; i < len; i++)
6158 ipa_param_performed_split *ps
6159 = &(*info->performed_splits)[i];
6160 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6163 if (unsigned len = vec_safe_length (cur_performed_splits))
6165 /* We do not want to add the current performed splits when we are saving
6166 a copy of the function body for later use during inlining; that would
6167 just duplicate all entries. So let's check whether anything
6168 referring to the first dummy_decl is already present. */
6169 if (!info)
6170 info = clone_info::get_create (node);
6171 unsigned dst_len = vec_safe_length (info->performed_splits);
6172 ipa_param_performed_split *first = &(*cur_performed_splits)[0];
6173 for (unsigned i = 0; i < dst_len; i++)
6174 if ((*info->performed_splits)[i].dummy_decl
6175 == first->dummy_decl)
6177 len = 0;
6178 break;
6181 for (unsigned i = 0; i < len; i++)
6182 vec_safe_push (info->performed_splits,
6183 (*cur_performed_splits)[i]);
6184 if (flag_checking)
6186 for (unsigned i = 0; i < dst_len; i++)
6188 ipa_param_performed_split *ps1
6189 = &(*info->performed_splits)[i];
6190 for (unsigned j = i + 1; j < dst_len; j++)
6192 ipa_param_performed_split *ps2
6193 = &(*info->performed_splits)[j];
6194 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
6195 || ps1->unit_offset != ps2->unit_offset);
6201 if (node->clones)
6202 node = node->clones;
6203 else if (node->next_sibling_clone)
6204 node = node->next_sibling_clone;
6205 else
6207 while (node != id->dst_node && !node->next_sibling_clone)
6208 node = node->clone_of;
6209 if (node != id->dst_node)
6210 node = node->next_sibling_clone;
6215 /* Create a copy of a function's tree.
6216 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6217 of the original function and the new copied function
6218 respectively. In case we want to replace a DECL
6219 tree with another tree while duplicating the function's
6220 body, TREE_MAP represents the mapping between these
6221 trees. If UPDATE_CLONES is set, the call_stmt fields
6222 of edges of clones of the function will be updated.
6224 If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype (i.e.
6225 the function parameters and return value) should be modified.
6226 If non-NULL, BLOCKS_TO_COPY determines what basic blocks to copy.
6227 If non-NULL, NEW_ENTRY determines the new entry BB of the clone. */
6229 void
6230 tree_function_versioning (tree old_decl, tree new_decl,
6231 vec<ipa_replace_map *, va_gc> *tree_map,
6232 ipa_param_adjustments *param_adjustments,
6233 bool update_clones, bitmap blocks_to_copy,
6234 basic_block new_entry)
6236 struct cgraph_node *old_version_node;
6237 struct cgraph_node *new_version_node;
6238 copy_body_data id;
6239 tree p;
6240 unsigned i;
6241 struct ipa_replace_map *replace_info;
6242 basic_block old_entry_block, bb;
6243 auto_vec<gimple *, 10> init_stmts;
6244 tree vars = NULL_TREE;
6246 /* We can get called recursively from expand_call_inline via clone
6247 materialization. While expand_call_inline maintains input_location,
6248 we cannot allow it to leak into the materialized clone. */
6249 location_t saved_location = input_location;
6250 input_location = UNKNOWN_LOCATION;
6252 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6253 && TREE_CODE (new_decl) == FUNCTION_DECL);
6254 DECL_POSSIBLY_INLINED (old_decl) = 1;
6256 old_version_node = cgraph_node::get (old_decl);
6257 gcc_checking_assert (old_version_node);
6258 new_version_node = cgraph_node::get (new_decl);
6259 gcc_checking_assert (new_version_node);
6261 /* Copy over debug args. */
6262 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6264 vec<tree, va_gc> **new_debug_args, **old_debug_args;
6265 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6266 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6267 old_debug_args = decl_debug_args_lookup (old_decl);
6268 if (old_debug_args)
6270 new_debug_args = decl_debug_args_insert (new_decl);
6271 *new_debug_args = vec_safe_copy (*old_debug_args);
6275 /* Output the inlining info for this abstract function, since it has been
6276 inlined. If we don't do this now, we can lose the information about the
6277 variables in the function when the blocks get blown away as soon as we
6278 remove the cgraph node. */
6279 (*debug_hooks->outlining_inline_function) (old_decl);
6281 DECL_ARTIFICIAL (new_decl) = 1;
6282 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6283 if (DECL_ORIGIN (old_decl) == old_decl)
6284 old_version_node->used_as_abstract_origin = true;
6285 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6287 /* Prepare the data structures for the tree copy. */
6288 memset (&id, 0, sizeof (id));
6290 /* Collect statements that will need folding in the new version. */
6291 id.statements_to_fold = new hash_set<gimple *>;
6293 id.decl_map = new hash_map<tree, tree>;
6294 id.debug_map = NULL;
6295 id.src_fn = old_decl;
6296 id.dst_fn = new_decl;
6297 id.src_node = old_version_node;
6298 id.dst_node = new_version_node;
6299 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6300 id.blocks_to_copy = blocks_to_copy;
6302 id.copy_decl = copy_decl_no_change;
6303 id.transform_call_graph_edges
6304 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6305 id.transform_new_cfg = true;
6306 id.transform_return_to_modify = false;
6307 id.transform_parameter = false;
6308 id.transform_lang_insert_block = NULL;
6310 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
6311 (DECL_STRUCT_FUNCTION (old_decl));
6312 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6313 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6314 initialize_cfun (new_decl, old_decl,
6315 new_entry ? new_entry->count : old_entry_block->count);
6316 new_version_node->calls_declare_variant_alt
6317 = old_version_node->calls_declare_variant_alt;
6318 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6319 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6320 = id.src_cfun->gimple_df->ipa_pta;
6322 /* Copy the function's static chain. */
6323 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6324 if (p)
6325 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6326 = copy_static_chain (p, &id);
6328 auto_vec<int, 16> new_param_indices;
6329 clone_info *info = clone_info::get (old_version_node);
6330 ipa_param_adjustments *old_param_adjustments
6331 = info ? info->param_adjustments : NULL;
6332 if (old_param_adjustments)
6333 old_param_adjustments->get_updated_indices (&new_param_indices);
6335 /* If there's a tree_map, prepare for substitution. */
6336 if (tree_map)
6337 for (i = 0; i < tree_map->length (); i++)
6339 gimple *init;
6340 replace_info = (*tree_map)[i];
6342 int p = replace_info->parm_num;
6343 if (old_param_adjustments)
6344 p = new_param_indices[p];
6346 tree parm;
6347 for (parm = DECL_ARGUMENTS (old_decl); p;
6348 parm = DECL_CHAIN (parm))
6349 p--;
6350 gcc_assert (parm);
6351 init = setup_one_parameter (&id, parm, replace_info->new_tree,
6352 id.src_fn, NULL, &vars);
6353 if (init)
6354 init_stmts.safe_push (init);
6357 ipa_param_body_adjustments *param_body_adjs = NULL;
6358 if (param_adjustments)
6360 param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6361 new_decl, old_decl,
6362 &id, &vars, tree_map);
6363 id.param_body_adjs = param_body_adjs;
6364 DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6366 else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6367 DECL_ARGUMENTS (new_decl)
6368 = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6370 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6371 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6373 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6375 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6376 /* Add local vars. */
6377 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6379 if (DECL_RESULT (old_decl) == NULL_TREE)
6381 else if (param_adjustments && param_adjustments->m_skip_return
6382 && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6384 tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6385 &id);
6386 declare_inline_vars (NULL, resdecl_repl);
6387 if (DECL_BY_REFERENCE (DECL_RESULT (old_decl)))
6388 resdecl_repl = build_fold_addr_expr (resdecl_repl);
6389 insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6391 DECL_RESULT (new_decl)
6392 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6393 RESULT_DECL, NULL_TREE, void_type_node);
6394 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6395 DECL_IS_MALLOC (new_decl) = false;
6396 cfun->returns_struct = 0;
6397 cfun->returns_pcc_struct = 0;
6399 else
6401 tree old_name;
6402 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6403 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6404 if (gimple_in_ssa_p (id.src_cfun)
6405 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6406 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6408 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6409 insert_decl_map (&id, old_name, new_name);
6410 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6411 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6415 /* Set up the destination function's loop tree. */
6416 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6418 cfun->curr_properties &= ~PROP_loops;
6419 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6420 cfun->curr_properties |= PROP_loops;
6423 /* Copy the function's body. */
6424 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6425 new_entry);
6427 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6428 number_blocks (new_decl);
6430 /* We want to create the BB unconditionally, so that the addition of
6431 debug stmts doesn't affect BB count, which may in the end cause
6432 codegen differences. */
6433 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6434 while (init_stmts.length ())
6435 insert_init_stmt (&id, bb, init_stmts.pop ());
6436 update_clone_info (&id);
6438 /* Remap the nonlocal_goto_save_area, if any. */
6439 if (cfun->nonlocal_goto_save_area)
6441 struct walk_stmt_info wi;
6443 memset (&wi, 0, sizeof (wi));
6444 wi.info = &id;
6445 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6448 /* Clean up. */
6449 delete id.decl_map;
6450 if (id.debug_map)
6451 delete id.debug_map;
6452 free_dominance_info (CDI_DOMINATORS);
6453 free_dominance_info (CDI_POST_DOMINATORS);
6455 update_max_bb_count ();
6456 fold_marked_statements (0, id.statements_to_fold);
6457 delete id.statements_to_fold;
6458 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6459 if (id.dst_node->definition)
6460 cgraph_edge::rebuild_references ();
6461 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6463 calculate_dominance_info (CDI_DOMINATORS);
6464 fix_loop_structure (NULL);
6466 update_ssa (TODO_update_ssa);
6468 /* After partial cloning we need to rescale frequencies, so they are
6469 within proper range in the cloned function. */
6470 if (new_entry)
6472 struct cgraph_edge *e;
6473 rebuild_frequencies ();
6475 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6476 for (e = new_version_node->callees; e; e = e->next_callee)
6478 basic_block bb = gimple_bb (e->call_stmt);
6479 e->count = bb->count;
6481 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6483 basic_block bb = gimple_bb (e->call_stmt);
6484 e->count = bb->count;
6488 if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6490 vec<tree, va_gc> **debug_args = NULL;
6491 unsigned int len = 0;
6492 unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6494 for (i = 0; i < reset_len; i++)
6496 tree parm = param_body_adjs->m_reset_debug_decls[i];
6497 gcc_assert (is_gimple_reg (parm));
6498 tree ddecl;
6500 if (debug_args == NULL)
6502 debug_args = decl_debug_args_insert (new_decl);
6503 len = vec_safe_length (*debug_args);
6505 ddecl = make_node (DEBUG_EXPR_DECL);
6506 DECL_ARTIFICIAL (ddecl) = 1;
6507 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6508 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6509 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6510 vec_safe_push (*debug_args, ddecl);
6512 if (debug_args != NULL)
6514 /* On the callee side, add
6515 DEBUG D#Y s=> parm
6516 DEBUG var => D#Y
6517 stmts to the first bb where var is a VAR_DECL created for the
6518 optimized away parameter in DECL_INITIAL block. This hints
6519 in the debug info that var (whose DECL_ORIGIN is the parm
6520 PARM_DECL) is optimized away, but could be looked up at the
6521 call site as value of D#X there. */
6522 tree vexpr;
6523 gimple_stmt_iterator cgsi
6524 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6525 gimple *def_temp;
6526 tree var = vars;
6527 i = vec_safe_length (*debug_args);
6530 i -= 2;
6531 while (var != NULL_TREE
6532 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6533 var = TREE_CHAIN (var);
6534 if (var == NULL_TREE)
6535 break;
6536 vexpr = make_node (DEBUG_EXPR_DECL);
6537 tree parm = (**debug_args)[i];
6538 DECL_ARTIFICIAL (vexpr) = 1;
6539 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6540 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6541 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6542 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6543 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6544 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6546 while (i > len);
6549 delete param_body_adjs;
6550 free_dominance_info (CDI_DOMINATORS);
6551 free_dominance_info (CDI_POST_DOMINATORS);
6553 gcc_assert (!id.debug_stmts.exists ());
6554 pop_cfun ();
6555 input_location = saved_location;
6556 return;
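/* Usage sketch (hypothetical, for illustration): with no replacement map,
   no parameter adjustments and no partial cloning, a whole-body version of
   OLD_DECL is produced by

     tree_function_versioning (old_decl, new_decl,
                               NULL, NULL, false, NULL, NULL);

   i.e. TREE_MAP, PARAM_ADJUSTMENTS, BLOCKS_TO_COPY and NEW_ENTRY are NULL
   and UPDATE_CLONES is false.  NEW_DECL must already exist and have a
   cgraph node of its own.  */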
6559 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6560 the callee and return the inlined body on success. */
6562 tree
6563 maybe_inline_call_in_expr (tree exp)
6565 tree fn = get_callee_fndecl (exp);
6567 /* We can only try to inline "const" functions. */
6568 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6570 call_expr_arg_iterator iter;
6571 copy_body_data id;
6572 tree param, arg, t;
6573 hash_map<tree, tree> decl_map;
6575 /* Remap the parameters. */
6576 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6577 param;
6578 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6579 decl_map.put (param, arg);
6581 memset (&id, 0, sizeof (id));
6582 id.src_fn = fn;
6583 id.dst_fn = current_function_decl;
6584 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6585 id.decl_map = &decl_map;
6587 id.copy_decl = copy_decl_no_change;
6588 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6589 id.transform_new_cfg = false;
6590 id.transform_return_to_modify = true;
6591 id.transform_parameter = true;
6592 id.transform_lang_insert_block = NULL;
6594 /* Make sure not to unshare trees behind the front-end's back
6595 since front-end specific mechanisms may rely on sharing. */
6596 id.regimplify = false;
6597 id.do_not_unshare = true;
6599 /* We're not inside any EH region. */
6600 id.eh_lp_nr = 0;
6602 t = copy_tree_body (&id);
6604 /* We can only return something suitable for use in a GENERIC
6605 expression tree. */
6606 if (TREE_CODE (t) == MODIFY_EXPR)
6607 return TREE_OPERAND (t, 1);
6610 return NULL_TREE;
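/* Usage sketch (hypothetical, for illustration): a front end folding a
   GENERIC expression may try

     tree folded = maybe_inline_call_in_expr (expr);
     if (folded)
       expr = folded;

   where EXPR is a CALL_EXPR; NULL_TREE is returned unless the callee is a
   "const" function with a saved body whose inlined result reduces to a
   single value.  */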
6613 /* Duplicate a type, fields and all. */
6615 tree
6616 build_duplicate_type (tree type)
6618 struct copy_body_data id;
6620 memset (&id, 0, sizeof (id));
6621 id.src_fn = current_function_decl;
6622 id.dst_fn = current_function_decl;
6623 id.src_cfun = cfun;
6624 id.decl_map = new hash_map<tree, tree>;
6625 id.debug_map = NULL;
6626 id.copy_decl = copy_decl_no_change;
6628 type = remap_type_1 (type, &id);
6630 delete id.decl_map;
6631 if (id.debug_map)
6632 delete id.debug_map;
6634 TYPE_CANONICAL (type) = type;
6636 return type;
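/* Usage sketch (hypothetical, for illustration): a front end that needs a
   variant of a record type with its own FIELD_DECLs, rather than one shared
   with the original, can use

     tree dup = build_duplicate_type (orig_type);

   The result is structurally identical to ORIG_TYPE but is its own
   TYPE_CANONICAL.  */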
6639 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6640 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6641 evaluation. */
6643 tree
6644 copy_fn (tree fn, tree& parms, tree& result)
6646 copy_body_data id;
6647 tree param;
6648 hash_map<tree, tree> decl_map;
6650 tree *p = &parms;
6651 *p = NULL_TREE;
6653 memset (&id, 0, sizeof (id));
6654 id.src_fn = fn;
6655 id.dst_fn = current_function_decl;
6656 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6657 id.decl_map = &decl_map;
6659 id.copy_decl = copy_decl_no_change;
6660 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6661 id.transform_new_cfg = false;
6662 id.transform_return_to_modify = false;
6663 id.transform_parameter = true;
6664 id.transform_lang_insert_block = NULL;
6666 /* Make sure not to unshare trees behind the front-end's back
6667 since front-end specific mechanisms may rely on sharing. */
6668 id.regimplify = false;
6669 id.do_not_unshare = true;
6670 id.do_not_fold = true;
6672 /* We're not inside any EH region. */
6673 id.eh_lp_nr = 0;
6675 /* Remap the parameters and result and return them to the caller. */
6676 for (param = DECL_ARGUMENTS (fn);
6677 param;
6678 param = DECL_CHAIN (param))
6680 *p = remap_decl (param, &id);
6681 p = &DECL_CHAIN (*p);
6684 if (DECL_RESULT (fn))
6685 result = remap_decl (DECL_RESULT (fn), &id);
6686 else
6687 result = NULL_TREE;
6689 return copy_tree_body (&id);
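/* Usage sketch (hypothetical, for illustration): a constexpr-style
   evaluator can obtain a private copy of a function body together with the
   matching parameter and result decls via

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);

   and then evaluate BODY, binding argument values to the decls chained in
   PARMS and reading the final value back through RESULT.  */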