gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2021 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "stringpool.h"
60 #include "attribs.h"
61 #include "sreal.h"
62 #include "tree-cfgcleanup.h"
63 #include "tree-ssa-live.h"
64 #include "alloc-pool.h"
65 #include "symbol-summary.h"
66 #include "symtab-thunks.h"
67 #include "symtab-clones.h"
69 /* I'm not real happy about this, but we need to handle gimple and
70 non-gimple trees. */
72 /* Inlining, Cloning, Versioning, Parallelization
74 Inlining: a function body is duplicated, but the PARM_DECLs are
75 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
76 MODIFY_EXPRs that store to a dedicated returned-value variable.
77 The duplicated eh_region info of the copy will later be appended
78 to the info for the caller; the eh_region info in copied throwing
79 statements and RESX statements are adjusted accordingly.
81 Cloning: (only in C++) We have one body for a con/de/structor, and
82 multiple function decls, each with a unique parameter list.
83 Duplicate the body, using the given splay tree; some parameters
84 will become constants (like 0 or 1).
86 Versioning: a function body is duplicated and the result is a new
87 function, rather than being inserted into blocks of an existing function
88 as with inlining. Some parameters will become constants.
90 Parallelization: a region of a function is duplicated resulting in
91 a new function. Variables may be replaced with complex expressions
92 to enable shared variable semantics.
94 All of these will simultaneously look up any callgraph edges. If
95 we're going to inline the duplicated function body, and the given
96 function has some cloned callgraph nodes (one for each place this
97 function will be inlined) those callgraph edges will be duplicated.
98 If we're cloning the body, those callgraph edges will be
99 updated to point into the new body. (Note that the original
100 callgraph node and edge list will not be altered.)
102 See the CALL_EXPR handling case in copy_tree_body_r (). */
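/* Illustrative sketch (not from the original sources): inlining the
   hypothetical function

       static int sq (int x) { return x * x; }

   at the call site in "int use (int a) { return sq (a) + 1; }" roughly
   produces

       int use (int a)
       {
	 int x_copy = a;                <- PARM_DECL became a VAR_DECL copy
	 int retval = x_copy * x_copy;  <- RETURN_EXPR became a MODIFY_EXPR
	 return retval + 1;
       }

   followed by CFG and callgraph-edge updates.  The names sq, use, x_copy
   and retval are assumed for this example only; the real temporaries are
   produced by remap_decl and declare_return_variable.  */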
104 /* To Do:
106 o In order to make inlining-on-trees work, we pessimized
107 function-local static constants. In particular, they are now
108 always output, even when not addressed. Fix this by treating
109 function-local static constants just like global static
110 constants; the back-end already knows not to output them if they
111 are not needed.
113 o Provide heuristics to clamp inlining of recursive template
114 calls? */
117 /* Weights that estimate_num_insns uses to estimate the size of the
118 produced code. */
120 eni_weights eni_size_weights;
122 /* Weights that estimate_num_insns uses to estimate the time necessary
123 to execute the produced code. */
125 eni_weights eni_time_weights;
127 /* Prototypes. */
129 static tree declare_return_variable (copy_body_data *, tree, tree,
130 basic_block);
131 static void remap_block (tree *, copy_body_data *);
132 static void copy_bind_expr (tree *, int *, copy_body_data *);
133 static void declare_inline_vars (tree, tree);
134 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
135 static void prepend_lexical_block (tree current_block, tree new_block);
136 static tree copy_result_decl_to_var (tree, copy_body_data *);
137 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
138 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
139 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
141 /* Insert a tree->tree mapping for ID. Although the name suggests
142 that the trees should be variables, it is used for more than that. */
144 void
145 insert_decl_map (copy_body_data *id, tree key, tree value)
147 id->decl_map->put (key, value);
149 /* Always insert an identity map as well. If we see this same new
150 node again, we won't want to duplicate it a second time. */
151 if (key != value)
152 id->decl_map->put (value, value);
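/* A small hedged example of why the identity entry above matters: after
   something like

       insert_decl_map (id, old_var, new_var);

   both old_var and new_var map to new_var, so a later walk over the copied
   body that reaches new_var (already the copy) looks it up and gets new_var
   back, instead of creating yet another duplicate.  old_var and new_var are
   illustrative names, not decls from this file.  */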
155 /* If nonzero, we're remapping the contents of inlined debug
156 statements. If negative, an error has occurred, such as a
157 reference to a variable that isn't available in the inlined
158 context. */
159 static int processing_debug_stmt = 0;
161 /* Construct new SSA name for old NAME. ID is the inline context. */
163 static tree
164 remap_ssa_name (tree name, copy_body_data *id)
166 tree new_tree, var;
167 tree *n;
169 gcc_assert (TREE_CODE (name) == SSA_NAME);
171 n = id->decl_map->get (name);
172 if (n)
174 /* When we perform edge redirection as part of CFG copy, IPA-SRA can
175 remove an unused LHS from a call statement. Such an LHS can however
176 still appear in debug statements, but its value is lost in this
177 function and we do not want to map it. */
178 if (id->killed_new_ssa_names
179 && id->killed_new_ssa_names->contains (*n))
181 gcc_assert (processing_debug_stmt);
182 processing_debug_stmt = -1;
183 return name;
186 return unshare_expr (*n);
189 if (processing_debug_stmt)
191 if (SSA_NAME_IS_DEFAULT_DEF (name)
192 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
193 && id->entry_bb == NULL
194 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
196 tree vexpr = make_node (DEBUG_EXPR_DECL);
197 gimple *def_temp;
198 gimple_stmt_iterator gsi;
199 tree val = SSA_NAME_VAR (name);
201 n = id->decl_map->get (val);
202 if (n != NULL)
203 val = *n;
204 if (TREE_CODE (val) != PARM_DECL
205 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
207 processing_debug_stmt = -1;
208 return name;
210 n = id->decl_map->get (val);
211 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
212 return *n;
213 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
214 DECL_ARTIFICIAL (vexpr) = 1;
215 TREE_TYPE (vexpr) = TREE_TYPE (name);
216 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
217 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
218 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
219 insert_decl_map (id, val, vexpr);
220 return vexpr;
223 processing_debug_stmt = -1;
224 return name;
227 /* Remap anonymous SSA names or SSA names of anonymous decls. */
228 var = SSA_NAME_VAR (name);
229 if (!var
230 || (!SSA_NAME_IS_DEFAULT_DEF (name)
231 && VAR_P (var)
232 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
233 && DECL_ARTIFICIAL (var)
234 && DECL_IGNORED_P (var)
235 && !DECL_NAME (var)))
237 struct ptr_info_def *pi;
238 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
239 if (!var && SSA_NAME_IDENTIFIER (name))
240 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
241 insert_decl_map (id, name, new_tree);
242 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
243 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
244 /* At least IPA points-to info can be directly transferred. */
245 if (id->src_cfun->gimple_df
246 && id->src_cfun->gimple_df->ipa_pta
247 && POINTER_TYPE_P (TREE_TYPE (name))
248 && (pi = SSA_NAME_PTR_INFO (name))
249 && !pi->pt.anything)
251 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
252 new_pi->pt = pi->pt;
254 /* So can range-info. */
255 if (!POINTER_TYPE_P (TREE_TYPE (name))
256 && SSA_NAME_RANGE_INFO (name))
257 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
258 SSA_NAME_RANGE_INFO (name));
259 return new_tree;
262 /* Do not set DEF_STMT yet as the statement is not copied yet. We do that
263 in copy_bb. */
264 new_tree = remap_decl (var, id);
266 /* We might've substituted a constant or another SSA_NAME for
267 the variable.
269 Replace the SSA name representing RESULT_DECL by the variable during
270 inlining: this saves us from the need to introduce a PHI node in case
271 the return value is just partly initialized. */
272 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
273 && (!SSA_NAME_VAR (name)
274 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
275 || !id->transform_return_to_modify))
277 struct ptr_info_def *pi;
278 new_tree = make_ssa_name (new_tree);
279 insert_decl_map (id, name, new_tree);
280 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
281 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
282 /* At least IPA points-to info can be directly transferred. */
283 if (id->src_cfun->gimple_df
284 && id->src_cfun->gimple_df->ipa_pta
285 && POINTER_TYPE_P (TREE_TYPE (name))
286 && (pi = SSA_NAME_PTR_INFO (name))
287 && !pi->pt.anything)
289 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
290 new_pi->pt = pi->pt;
292 /* So can range-info. */
293 if (!POINTER_TYPE_P (TREE_TYPE (name))
294 && SSA_NAME_RANGE_INFO (name))
295 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
296 SSA_NAME_RANGE_INFO (name));
297 if (SSA_NAME_IS_DEFAULT_DEF (name))
299 /* By inlining a function having an uninitialized variable, we might
300 extend its lifetime (the variable might get reused). This causes an
301 ICE in the case we end up extending the lifetime of an SSA name across
302 an abnormal edge, but it also increases register pressure.
304 We simply initialize all uninitialized vars to 0, except for the
305 case we are inlining into the very first BB. We can avoid this
306 for all BBs that are not inside strongly connected regions of
307 the CFG, but this is expensive to test. */
308 if (id->entry_bb
309 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
310 && (!SSA_NAME_VAR (name)
311 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
312 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
313 0)->dest
314 || EDGE_COUNT (id->entry_bb->preds) != 1))
316 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
317 gimple *init_stmt;
318 tree zero = build_zero_cst (TREE_TYPE (new_tree));
320 init_stmt = gimple_build_assign (new_tree, zero);
321 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
322 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
324 else
326 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
327 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
331 else
332 insert_decl_map (id, name, new_tree);
333 return new_tree;
336 /* Remap DECL during the copying of the BLOCK tree for the function. */
338 tree
339 remap_decl (tree decl, copy_body_data *id)
341 tree *n;
343 /* We only remap local variables in the current function. */
345 /* See if we have remapped this declaration. */
347 n = id->decl_map->get (decl);
349 if (!n && processing_debug_stmt)
351 processing_debug_stmt = -1;
352 return decl;
355 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
356 necessary DECLs have already been remapped and we do not want to duplicate
357 a decl coming from outside of the sequence we are copying. */
358 if (!n
359 && id->prevent_decl_creation_for_types
360 && id->remapping_type_depth > 0
361 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
362 return decl;
364 /* If we didn't already have an equivalent for this declaration, create one
365 now. */
366 if (!n)
368 /* Make a copy of the variable or label. */
369 tree t = id->copy_decl (decl, id);
371 /* Remember it, so that if we encounter this local entity again
372 we can reuse this copy. Do this early because remap_type may
373 need this decl for TYPE_STUB_DECL. */
374 insert_decl_map (id, decl, t);
376 if (!DECL_P (t))
377 return t;
379 /* Remap types, if necessary. */
380 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
381 if (TREE_CODE (t) == TYPE_DECL)
383 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
385 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
386 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
387 is not set on the TYPE_DECL, for example in LTO mode. */
388 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
390 tree x = build_variant_type_copy (TREE_TYPE (t));
391 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
392 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
393 DECL_ORIGINAL_TYPE (t) = x;
397 /* Remap sizes as necessary. */
398 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
399 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
401 /* If fields, do likewise for offset and qualifier. */
402 if (TREE_CODE (t) == FIELD_DECL)
404 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
405 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
406 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
409 return t;
412 if (id->do_not_unshare)
413 return *n;
414 else
415 return unshare_expr (*n);
418 static tree
419 remap_type_1 (tree type, copy_body_data *id)
421 tree new_tree, t;
423 /* We do need a copy. Build and register it now. If this is a pointer or
424 reference type, remap the designated type and make a new pointer or
425 reference type. */
426 if (TREE_CODE (type) == POINTER_TYPE)
428 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
429 TYPE_MODE (type),
430 TYPE_REF_CAN_ALIAS_ALL (type));
431 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
432 new_tree = build_type_attribute_qual_variant (new_tree,
433 TYPE_ATTRIBUTES (type),
434 TYPE_QUALS (type));
435 insert_decl_map (id, type, new_tree);
436 return new_tree;
438 else if (TREE_CODE (type) == REFERENCE_TYPE)
440 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
441 TYPE_MODE (type),
442 TYPE_REF_CAN_ALIAS_ALL (type));
443 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
444 new_tree = build_type_attribute_qual_variant (new_tree,
445 TYPE_ATTRIBUTES (type),
446 TYPE_QUALS (type));
447 insert_decl_map (id, type, new_tree);
448 return new_tree;
450 else
451 new_tree = copy_node (type);
453 insert_decl_map (id, type, new_tree);
455 /* This is a new type, not a copy of an old type. Need to reassociate
456 variants. We can handle everything except the main variant lazily. */
457 t = TYPE_MAIN_VARIANT (type);
458 if (type != t)
460 t = remap_type (t, id);
461 TYPE_MAIN_VARIANT (new_tree) = t;
462 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
463 TYPE_NEXT_VARIANT (t) = new_tree;
465 else
467 TYPE_MAIN_VARIANT (new_tree) = new_tree;
468 TYPE_NEXT_VARIANT (new_tree) = NULL;
471 if (TYPE_STUB_DECL (type))
472 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
474 /* Lazily create pointer and reference types. */
475 TYPE_POINTER_TO (new_tree) = NULL;
476 TYPE_REFERENCE_TO (new_tree) = NULL;
478 /* Copy all types that may contain references to local variables; be sure to
479 preserve sharing between the type and its main variant when possible. */
480 switch (TREE_CODE (new_tree))
482 case INTEGER_TYPE:
483 case REAL_TYPE:
484 case FIXED_POINT_TYPE:
485 case ENUMERAL_TYPE:
486 case BOOLEAN_TYPE:
487 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
489 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
490 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
492 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
493 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
495 else
497 t = TYPE_MIN_VALUE (new_tree);
498 if (t && TREE_CODE (t) != INTEGER_CST)
499 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
501 t = TYPE_MAX_VALUE (new_tree);
502 if (t && TREE_CODE (t) != INTEGER_CST)
503 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
505 return new_tree;
507 case FUNCTION_TYPE:
508 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
509 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
510 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
511 else
512 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
513 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
514 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
515 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
516 else
517 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
518 return new_tree;
520 case ARRAY_TYPE:
521 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
522 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
523 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
524 else
525 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
527 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
529 gcc_checking_assert (TYPE_DOMAIN (type)
530 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
531 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
533 else
535 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
536 /* For array bounds where we have decided not to copy over the bounds
537 variable which isn't used in the OpenMP/OpenACC region, change them to
538 an uninitialized VAR_DECL temporary. */
539 if (id->adjust_array_error_bounds
540 && TYPE_DOMAIN (new_tree)
541 && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
542 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
544 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
545 DECL_ATTRIBUTES (v)
546 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
547 DECL_ATTRIBUTES (v));
548 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
551 break;
553 case RECORD_TYPE:
554 case UNION_TYPE:
555 case QUAL_UNION_TYPE:
556 if (TYPE_MAIN_VARIANT (type) != type
557 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
558 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
559 else
561 tree f, nf = NULL;
563 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
565 t = remap_decl (f, id);
566 DECL_CONTEXT (t) = new_tree;
567 DECL_CHAIN (t) = nf;
568 nf = t;
570 TYPE_FIELDS (new_tree) = nreverse (nf);
572 break;
574 case OFFSET_TYPE:
575 default:
576 /* Shouldn't have been thought variable sized. */
577 gcc_unreachable ();
580 /* All variants of the type share the same size, so use the already remapped data. */
581 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
583 tree s = TYPE_SIZE (type);
584 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
585 tree su = TYPE_SIZE_UNIT (type);
586 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
587 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
588 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
589 || s == mvs);
590 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
591 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
592 || su == mvsu);
593 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
594 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
596 else
598 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
599 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
602 return new_tree;
605 /* Helper function for remap_type_2, called through walk_tree. */
607 static tree
608 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
610 copy_body_data *id = (copy_body_data *) data;
612 if (TYPE_P (*tp))
613 *walk_subtrees = 0;
615 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
616 return *tp;
618 return NULL_TREE;
621 /* Return true if TYPE needs to be remapped because remap_decl on any
622 needed embedded decl returns something other than that decl. */
624 static bool
625 remap_type_2 (tree type, copy_body_data *id)
627 tree t;
629 #define RETURN_TRUE_IF_VAR(T) \
630 do \
632 tree _t = (T); \
633 if (_t) \
635 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
636 return true; \
637 if (!TYPE_SIZES_GIMPLIFIED (type) \
638 && walk_tree (&_t, remap_type_3, id, NULL)) \
639 return true; \
642 while (0)
644 switch (TREE_CODE (type))
646 case POINTER_TYPE:
647 case REFERENCE_TYPE:
648 case FUNCTION_TYPE:
649 case METHOD_TYPE:
650 return remap_type_2 (TREE_TYPE (type), id);
652 case INTEGER_TYPE:
653 case REAL_TYPE:
654 case FIXED_POINT_TYPE:
655 case ENUMERAL_TYPE:
656 case BOOLEAN_TYPE:
657 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
658 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
659 return false;
661 case ARRAY_TYPE:
662 if (remap_type_2 (TREE_TYPE (type), id)
663 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
664 return true;
665 break;
667 case RECORD_TYPE:
668 case UNION_TYPE:
669 case QUAL_UNION_TYPE:
670 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
671 if (TREE_CODE (t) == FIELD_DECL)
673 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
674 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
675 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
676 if (TREE_CODE (type) == QUAL_UNION_TYPE)
677 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
679 break;
681 default:
682 return false;
685 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
686 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
687 return false;
688 #undef RETURN_TRUE_IF_VAR
691 tree
692 remap_type (tree type, copy_body_data *id)
694 tree *node;
695 tree tmp;
697 if (type == NULL)
698 return type;
700 /* See if we have remapped this type. */
701 node = id->decl_map->get (type);
702 if (node)
703 return *node;
705 /* The type only needs remapping if it's variably modified. */
706 if (! variably_modified_type_p (type, id->src_fn)
707 /* Don't remap if the copy_decl method doesn't always return a new
708 decl and, for all embedded decls, returns the passed-in decl. */
709 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
711 insert_decl_map (id, type, type);
712 return type;
715 id->remapping_type_depth++;
716 tmp = remap_type_1 (type, id);
717 id->remapping_type_depth--;
719 return tmp;
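/* Illustrative sketch (assumed example, not from this file): given

       void f (int n) { int a[n]; ... }

   the type of a is variably modified by n, so variably_modified_type_p
   returns true and remap_type_1 builds a fresh array type whose domain
   refers to the remapped copy of the saved bound.  A type such as plain
   int[10] is not variably modified and is simply mapped to itself.  */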
722 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
724 static bool
725 can_be_nonlocal (tree decl, copy_body_data *id)
727 /* We cannot duplicate function decls. */
728 if (TREE_CODE (decl) == FUNCTION_DECL)
729 return true;
731 /* Local static vars must be non-local or we get multiple declaration
732 problems. */
733 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
734 return true;
736 return false;
739 static tree
740 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
741 copy_body_data *id)
743 tree old_var;
744 tree new_decls = NULL_TREE;
746 /* Remap its variables. */
747 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
749 tree new_var;
751 if (can_be_nonlocal (old_var, id))
753 /* We need to add this variable to the local decls as otherwise
754 nothing else will do so. */
755 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
756 add_local_decl (cfun, old_var);
757 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
758 && !DECL_IGNORED_P (old_var)
759 && nonlocalized_list)
760 vec_safe_push (*nonlocalized_list, old_var);
761 continue;
764 /* Remap the variable. */
765 new_var = remap_decl (old_var, id);
767 /* If we didn't remap this variable, we can't mess with its
768 TREE_CHAIN. If we remapped this variable to the return slot, it's
769 already declared somewhere else, so don't declare it here. */
771 if (new_var == id->retvar)
773 else if (!new_var)
775 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
776 && !DECL_IGNORED_P (old_var)
777 && nonlocalized_list)
778 vec_safe_push (*nonlocalized_list, old_var);
780 else
782 gcc_assert (DECL_P (new_var));
783 DECL_CHAIN (new_var) = new_decls;
784 new_decls = new_var;
786 /* Also copy value-expressions. */
787 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
789 tree tem = DECL_VALUE_EXPR (new_var);
790 bool old_regimplify = id->regimplify;
791 id->remapping_type_depth++;
792 walk_tree (&tem, copy_tree_body_r, id, NULL);
793 id->remapping_type_depth--;
794 id->regimplify = old_regimplify;
795 SET_DECL_VALUE_EXPR (new_var, tem);
800 return nreverse (new_decls);
803 /* Copy the BLOCK to contain remapped versions of the variables
804 therein. And hook the new block into the block-tree. */
806 static void
807 remap_block (tree *block, copy_body_data *id)
809 tree old_block;
810 tree new_block;
812 /* Make the new block. */
813 old_block = *block;
814 new_block = make_node (BLOCK);
815 TREE_USED (new_block) = TREE_USED (old_block);
816 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
817 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
818 BLOCK_NONLOCALIZED_VARS (new_block)
819 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
820 *block = new_block;
822 /* Remap its variables. */
823 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
824 &BLOCK_NONLOCALIZED_VARS (new_block),
825 id);
827 if (id->transform_lang_insert_block)
828 id->transform_lang_insert_block (new_block);
830 /* Remember the remapped block. */
831 insert_decl_map (id, old_block, new_block);
834 /* Copy the whole block tree and root it in id->block. */
836 static tree
837 remap_blocks (tree block, copy_body_data *id)
839 tree t;
840 tree new_tree = block;
842 if (!block)
843 return NULL;
845 remap_block (&new_tree, id);
846 gcc_assert (new_tree != block);
847 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
848 prepend_lexical_block (new_tree, remap_blocks (t, id));
849 /* Blocks are in arbitrary order, but make things slightly prettier and do
850 not swap order when producing a copy. */
851 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
852 return new_tree;
855 /* Remap the block tree rooted at BLOCK to nothing. */
857 static void
858 remap_blocks_to_null (tree block, copy_body_data *id)
860 tree t;
861 insert_decl_map (id, block, NULL_TREE);
862 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
863 remap_blocks_to_null (t, id);
866 /* Remap the location info pointed to by LOCUS. */
868 static location_t
869 remap_location (location_t locus, copy_body_data *id)
871 if (LOCATION_BLOCK (locus))
873 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
874 gcc_assert (n);
875 if (*n)
876 return set_block (locus, *n);
879 locus = LOCATION_LOCUS (locus);
881 if (locus != UNKNOWN_LOCATION && id->block)
882 return set_block (locus, id->block);
884 return locus;
887 static void
888 copy_statement_list (tree *tp)
890 tree_stmt_iterator oi, ni;
891 tree new_tree;
893 new_tree = alloc_stmt_list ();
894 ni = tsi_start (new_tree);
895 oi = tsi_start (*tp);
896 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
897 *tp = new_tree;
899 for (; !tsi_end_p (oi); tsi_next (&oi))
901 tree stmt = tsi_stmt (oi);
902 if (TREE_CODE (stmt) == STATEMENT_LIST)
903 /* This copy is not redundant; tsi_link_after will smash this
904 STATEMENT_LIST into the end of the one we're building, and we
905 don't want to do that with the original. */
906 copy_statement_list (&stmt);
907 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
911 static void
912 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
914 tree block = BIND_EXPR_BLOCK (*tp);
915 /* Copy (and replace) the statement. */
916 copy_tree_r (tp, walk_subtrees, NULL);
917 if (block)
919 remap_block (&block, id);
920 BIND_EXPR_BLOCK (*tp) = block;
923 if (BIND_EXPR_VARS (*tp))
924 /* This will remap a lot of the same decls again, but this should be
925 harmless. */
926 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
930 /* Create a new gimple_seq by remapping all the statements in BODY
931 using the inlining information in ID. */
933 static gimple_seq
934 remap_gimple_seq (gimple_seq body, copy_body_data *id)
936 gimple_stmt_iterator si;
937 gimple_seq new_body = NULL;
939 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
941 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
942 gimple_seq_add_seq (&new_body, new_stmts);
945 return new_body;
949 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
950 block using the mapping information in ID. */
952 static gimple *
953 copy_gimple_bind (gbind *stmt, copy_body_data *id)
955 gimple *new_bind;
956 tree new_block, new_vars;
957 gimple_seq body, new_body;
959 /* Copy the statement. Note that we purposely don't use copy_stmt
960 here because we need to remap statements as we copy. */
961 body = gimple_bind_body (stmt);
962 new_body = remap_gimple_seq (body, id);
964 new_block = gimple_bind_block (stmt);
965 if (new_block)
966 remap_block (&new_block, id);
968 /* This will remap a lot of the same decls again, but this should be
969 harmless. */
970 new_vars = gimple_bind_vars (stmt);
971 if (new_vars)
972 new_vars = remap_decls (new_vars, NULL, id);
974 new_bind = gimple_build_bind (new_vars, new_body, new_block);
976 return new_bind;
979 /* Return true if DECL is a parameter or an SSA_NAME for a parameter. */
981 static bool
982 is_parm (tree decl)
984 if (TREE_CODE (decl) == SSA_NAME)
986 decl = SSA_NAME_VAR (decl);
987 if (!decl)
988 return false;
991 return (TREE_CODE (decl) == PARM_DECL);
994 /* Remap the dependence CLIQUE from the source to the destination function
995 as specified in ID. */
997 static unsigned short
998 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1000 if (clique == 0 || processing_debug_stmt)
1001 return 0;
1002 if (!id->dependence_map)
1003 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1004 bool existed;
1005 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1006 if (!existed)
1008 /* Clique 1 is reserved for local ones set by PTA. */
1009 if (cfun->last_clique == 0)
1010 cfun->last_clique = 1;
1011 newc = ++cfun->last_clique;
1013 return newc;
1016 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1017 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1018 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
1019 recursing into the child nodes of *TP. */
1021 static tree
1022 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1024 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1025 copy_body_data *id = (copy_body_data *) wi_p->info;
1026 tree fn = id->src_fn;
1028 /* For recursive invocations this is no longer the LHS itself. */
1029 bool is_lhs = wi_p->is_lhs;
1030 wi_p->is_lhs = false;
1032 if (TREE_CODE (*tp) == SSA_NAME)
1034 *tp = remap_ssa_name (*tp, id);
1035 *walk_subtrees = 0;
1036 if (is_lhs)
1037 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1038 return NULL;
1040 else if (auto_var_in_fn_p (*tp, fn))
1042 /* Local variables and labels need to be replaced by equivalent
1043 variables. We don't want to copy static variables; there's
1044 only one of those, no matter how many times we inline the
1045 containing function. Similarly for globals from an outer
1046 function. */
1047 tree new_decl;
1049 /* Remap the declaration. */
1050 new_decl = remap_decl (*tp, id);
1051 gcc_assert (new_decl);
1052 /* Replace this variable with the copy. */
1053 STRIP_TYPE_NOPS (new_decl);
1054 /* ??? The C++ frontend uses void * pointer zero to initialize
1055 any other type. This confuses the middle-end type verification.
1056 As cloned bodies do not go through gimplification again the fixup
1057 there doesn't trigger. */
1058 if (TREE_CODE (new_decl) == INTEGER_CST
1059 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1060 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1061 *tp = new_decl;
1062 *walk_subtrees = 0;
1064 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1065 gcc_unreachable ();
1066 else if (TREE_CODE (*tp) == SAVE_EXPR)
1067 gcc_unreachable ();
1068 else if (TREE_CODE (*tp) == LABEL_DECL
1069 && (!DECL_CONTEXT (*tp)
1070 || decl_function_context (*tp) == id->src_fn))
1071 /* These may need to be remapped for EH handling. */
1072 *tp = remap_decl (*tp, id);
1073 else if (TREE_CODE (*tp) == FIELD_DECL)
1075 /* If the enclosing record type is variably_modified_type_p, the field
1076 has already been remapped. Otherwise, it need not be. */
1077 tree *n = id->decl_map->get (*tp);
1078 if (n)
1079 *tp = *n;
1080 *walk_subtrees = 0;
1082 else if (TYPE_P (*tp))
1083 /* Types may need remapping as well. */
1084 *tp = remap_type (*tp, id);
1085 else if (CONSTANT_CLASS_P (*tp))
1087 /* If this is a constant, we have to copy the node iff the type
1088 will be remapped. copy_tree_r will not copy a constant. */
1089 tree new_type = remap_type (TREE_TYPE (*tp), id);
1091 if (new_type == TREE_TYPE (*tp))
1092 *walk_subtrees = 0;
1094 else if (TREE_CODE (*tp) == INTEGER_CST)
1095 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1096 else
1098 *tp = copy_node (*tp);
1099 TREE_TYPE (*tp) = new_type;
1102 else
1104 /* Otherwise, just copy the node. Note that copy_tree_r already
1105 knows not to copy VAR_DECLs, etc., so this is safe. */
1107 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1109 /* We need to re-canonicalize MEM_REFs from inline substitutions
1110 that can happen when a pointer argument is an ADDR_EXPR.
1111 Recurse here manually to allow that. */
1112 tree ptr = TREE_OPERAND (*tp, 0);
1113 tree type = remap_type (TREE_TYPE (*tp), id);
1114 tree old = *tp;
1115 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1116 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1117 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1118 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1119 copy_warning (*tp, old);
1120 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1122 MR_DEPENDENCE_CLIQUE (*tp)
1123 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1124 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1126 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1127 remapped a parameter as the property might be valid only
1128 for the parameter itself. */
1129 if (TREE_THIS_NOTRAP (old)
1130 && (!is_parm (TREE_OPERAND (old, 0))
1131 || (!id->transform_parameter && is_parm (ptr))))
1132 TREE_THIS_NOTRAP (*tp) = 1;
1133 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1134 *walk_subtrees = 0;
1135 return NULL;
1138 /* Here is the "usual case". Copy this tree node, and then
1139 tweak some special cases. */
1140 copy_tree_r (tp, walk_subtrees, NULL);
1142 if (TREE_CODE (*tp) != OMP_CLAUSE)
1143 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1145 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1147 /* The copied TARGET_EXPR has never been expanded, even if the
1148 original node was expanded already. */
1149 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1150 TREE_OPERAND (*tp, 3) = NULL_TREE;
1152 else if (TREE_CODE (*tp) == ADDR_EXPR)
1154 /* Variable substitution need not be simple. In particular,
1155 the MEM_REF substitution above. Make sure that
1156 TREE_CONSTANT and friends are up-to-date. */
1157 int invariant = is_gimple_min_invariant (*tp);
1158 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1159 recompute_tree_invariant_for_addr_expr (*tp);
1161 /* If this used to be invariant, but is not any longer,
1162 then regimplification is probably needed. */
1163 if (invariant && !is_gimple_min_invariant (*tp))
1164 id->regimplify = true;
1166 *walk_subtrees = 0;
1170 /* Update the TREE_BLOCK for the cloned expr. */
1171 if (EXPR_P (*tp))
1173 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1174 tree old_block = TREE_BLOCK (*tp);
1175 if (old_block)
1177 tree *n;
1178 n = id->decl_map->get (TREE_BLOCK (*tp));
1179 if (n)
1180 new_block = *n;
1182 TREE_SET_BLOCK (*tp, new_block);
1185 /* Keep iterating. */
1186 return NULL_TREE;
1190 /* Called from copy_body_id via walk_tree. DATA is really a
1191 `copy_body_data *'. */
1193 tree
1194 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1196 copy_body_data *id = (copy_body_data *) data;
1197 tree fn = id->src_fn;
1198 tree new_block;
1200 /* Begin by recognizing trees that we'll completely rewrite for the
1201 inlining context. Our output for these trees is completely
1202 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1203 into an edge). Further down, we'll handle trees that get
1204 duplicated and/or tweaked. */
1206 /* When requested, RETURN_EXPRs should be transformed to just the
1207 contained MODIFY_EXPR. The branch semantics of the return will
1208 be handled elsewhere by manipulating the CFG rather than a statement. */
1209 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1211 tree assignment = TREE_OPERAND (*tp, 0);
1213 /* If we're returning something, just turn that into an
1214 assignment into the equivalent of the original RESULT_DECL.
1215 If the "assignment" is just the result decl, the result
1216 decl has already been set (e.g. a recent "foo (&result_decl,
1217 ...)"); just toss the entire RETURN_EXPR. */
1218 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1220 /* Replace the RETURN_EXPR with (a copy of) the
1221 MODIFY_EXPR hanging underneath. */
1222 *tp = copy_node (assignment);
1224 else /* Else the RETURN_EXPR returns no value. */
1226 *tp = NULL;
1227 return (tree) (void *)1;
1230 else if (TREE_CODE (*tp) == SSA_NAME)
1232 *tp = remap_ssa_name (*tp, id);
1233 *walk_subtrees = 0;
1234 return NULL;
1237 /* Local variables and labels need to be replaced by equivalent
1238 variables. We don't want to copy static variables; there's only
1239 one of those, no matter how many times we inline the containing
1240 function. Similarly for globals from an outer function. */
1241 else if (auto_var_in_fn_p (*tp, fn))
1243 tree new_decl;
1245 /* Remap the declaration. */
1246 new_decl = remap_decl (*tp, id);
1247 gcc_assert (new_decl);
1248 /* Replace this variable with the copy. */
1249 STRIP_TYPE_NOPS (new_decl);
1250 *tp = new_decl;
1251 *walk_subtrees = 0;
1253 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1254 copy_statement_list (tp);
1255 else if (TREE_CODE (*tp) == SAVE_EXPR
1256 || TREE_CODE (*tp) == TARGET_EXPR)
1257 remap_save_expr (tp, id->decl_map, walk_subtrees);
1258 else if (TREE_CODE (*tp) == LABEL_DECL
1259 && (! DECL_CONTEXT (*tp)
1260 || decl_function_context (*tp) == id->src_fn))
1261 /* These may need to be remapped for EH handling. */
1262 *tp = remap_decl (*tp, id);
1263 else if (TREE_CODE (*tp) == BIND_EXPR)
1264 copy_bind_expr (tp, walk_subtrees, id);
1265 /* Types may need remapping as well. */
1266 else if (TYPE_P (*tp))
1267 *tp = remap_type (*tp, id);
1269 /* If this is a constant, we have to copy the node iff the type will be
1270 remapped. copy_tree_r will not copy a constant. */
1271 else if (CONSTANT_CLASS_P (*tp))
1273 tree new_type = remap_type (TREE_TYPE (*tp), id);
1275 if (new_type == TREE_TYPE (*tp))
1276 *walk_subtrees = 0;
1278 else if (TREE_CODE (*tp) == INTEGER_CST)
1279 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1280 else
1282 *tp = copy_node (*tp);
1283 TREE_TYPE (*tp) = new_type;
1287 /* Otherwise, just copy the node. Note that copy_tree_r already
1288 knows not to copy VAR_DECLs, etc., so this is safe. */
1289 else
1291 /* Here we handle trees that are not completely rewritten.
1292 First we detect some inlining-induced bogosities for
1293 discarding. */
1294 if (TREE_CODE (*tp) == MODIFY_EXPR
1295 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1296 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1298 /* Some assignments VAR = VAR; don't generate any rtl code
1299 and thus don't count as variable modification. Avoid
1300 keeping bogosities like 0 = 0. */
1301 tree decl = TREE_OPERAND (*tp, 0), value;
1302 tree *n;
1304 n = id->decl_map->get (decl);
1305 if (n)
1307 value = *n;
1308 STRIP_TYPE_NOPS (value);
1309 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1311 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1312 return copy_tree_body_r (tp, walk_subtrees, data);
1316 else if (TREE_CODE (*tp) == INDIRECT_REF)
1318 /* Get rid of *& from inline substitutions that can happen when a
1319 pointer argument is an ADDR_EXPR. */
1320 tree decl = TREE_OPERAND (*tp, 0);
1321 tree *n = id->decl_map->get (decl);
1322 if (n)
1324 /* If we happen to get an ADDR_EXPR in n->value, strip
1325 it manually here as we'll eventually get ADDR_EXPRs
1326 which lie about their types pointed to. In this case
1327 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1328 but we absolutely rely on that. As fold_indirect_ref
1329 does other useful transformations, try that first, though. */
1330 tree type = TREE_TYPE (*tp);
1331 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1332 tree old = *tp;
1333 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1334 if (! *tp)
1336 type = remap_type (type, id);
1337 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1339 *tp
1340 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1341 /* ??? We should either assert here or build
1342 a VIEW_CONVERT_EXPR instead of blindly leaking
1343 incompatible types to our IL. */
1344 if (! *tp)
1345 *tp = TREE_OPERAND (ptr, 0);
1347 else
1349 *tp = build1 (INDIRECT_REF, type, ptr);
1350 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1351 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1352 TREE_READONLY (*tp) = TREE_READONLY (old);
1353 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1354 have remapped a parameter as the property might be
1355 valid only for the parameter itself. */
1356 if (TREE_THIS_NOTRAP (old)
1357 && (!is_parm (TREE_OPERAND (old, 0))
1358 || (!id->transform_parameter && is_parm (ptr))))
1359 TREE_THIS_NOTRAP (*tp) = 1;
1362 *walk_subtrees = 0;
1363 return NULL;
1366 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1368 /* We need to re-canonicalize MEM_REFs from inline substitutions
1369 that can happen when a pointer argument is an ADDR_EXPR.
1370 Recurse here manually to allow that. */
1371 tree ptr = TREE_OPERAND (*tp, 0);
1372 tree type = remap_type (TREE_TYPE (*tp), id);
1373 tree old = *tp;
1374 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1375 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1376 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1377 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1378 copy_warning (*tp, old);
1379 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1381 MR_DEPENDENCE_CLIQUE (*tp)
1382 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1383 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1385 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1386 remapped a parameter as the property might be valid only
1387 for the parameter itself. */
1388 if (TREE_THIS_NOTRAP (old)
1389 && (!is_parm (TREE_OPERAND (old, 0))
1390 || (!id->transform_parameter && is_parm (ptr))))
1391 TREE_THIS_NOTRAP (*tp) = 1;
1392 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1393 *walk_subtrees = 0;
1394 return NULL;
1397 /* Here is the "usual case". Copy this tree node, and then
1398 tweak some special cases. */
1399 copy_tree_r (tp, walk_subtrees, NULL);
1401 /* If EXPR has a block defined, map it to the newly constructed block.
1402 When inlining we want EXPRs without a block to appear in the block
1403 of the function call if we are not remapping a type. */
1404 if (EXPR_P (*tp))
1406 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1407 if (TREE_BLOCK (*tp))
1409 tree *n;
1410 n = id->decl_map->get (TREE_BLOCK (*tp));
1411 if (n)
1412 new_block = *n;
1414 TREE_SET_BLOCK (*tp, new_block);
1417 if (TREE_CODE (*tp) != OMP_CLAUSE)
1418 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1420 /* The copied TARGET_EXPR has never been expanded, even if the
1421 original node was expanded already. */
1422 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1424 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1425 TREE_OPERAND (*tp, 3) = NULL_TREE;
1428 /* Variable substitution need not be simple. In particular, the
1429 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1430 and friends are up-to-date. */
1431 else if (TREE_CODE (*tp) == ADDR_EXPR)
1433 int invariant = is_gimple_min_invariant (*tp);
1434 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1436 /* Handle the case where we substituted an INDIRECT_REF
1437 into the operand of the ADDR_EXPR. */
1438 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1439 && !id->do_not_fold)
1441 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1442 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1443 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1444 *tp = t;
1446 else
1447 recompute_tree_invariant_for_addr_expr (*tp);
1449 /* If this used to be invariant, but is not any longer,
1450 then regimplification is probably needed. */
1451 if (invariant && !is_gimple_min_invariant (*tp))
1452 id->regimplify = true;
1454 *walk_subtrees = 0;
1456 else if (TREE_CODE (*tp) == OMP_CLAUSE
1457 && (OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_AFFINITY
1458 || OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_DEPEND))
1460 tree t = OMP_CLAUSE_DECL (*tp);
1461 if (t
1462 && TREE_CODE (t) == TREE_LIST
1463 && TREE_PURPOSE (t)
1464 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
1466 *walk_subtrees = 0;
1467 OMP_CLAUSE_DECL (*tp) = copy_node (t);
1468 t = OMP_CLAUSE_DECL (*tp);
1469 TREE_PURPOSE (t) = copy_node (TREE_PURPOSE (t));
1470 for (int i = 0; i <= 4; i++)
1471 walk_tree (&TREE_VEC_ELT (TREE_PURPOSE (t), i),
1472 copy_tree_body_r, id, NULL);
1473 if (TREE_VEC_ELT (TREE_PURPOSE (t), 5))
1474 remap_block (&TREE_VEC_ELT (TREE_PURPOSE (t), 5), id);
1475 walk_tree (&TREE_VALUE (t), copy_tree_body_r, id, NULL);
1480 /* Keep iterating. */
1481 return NULL_TREE;
1484 /* Helper for remap_gimple_stmt. Given an EH region number for the
1485 source function, map that to the duplicate EH region number in
1486 the destination function. */
1488 static int
1489 remap_eh_region_nr (int old_nr, copy_body_data *id)
1491 eh_region old_r, new_r;
1493 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1494 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1496 return new_r->index;
1499 /* Similar, but operate on INTEGER_CSTs. */
1501 static tree
1502 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1504 int old_nr, new_nr;
1506 old_nr = tree_to_shwi (old_t_nr);
1507 new_nr = remap_eh_region_nr (old_nr, id);
1509 return build_int_cst (integer_type_node, new_nr);
1512 /* Helper for copy_bb. Remap statement STMT using the inlining
1513 information in ID. Return the new statement copy. */
1515 static gimple_seq
1516 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1518 gimple *copy = NULL;
1519 struct walk_stmt_info wi;
1520 bool skip_first = false;
1521 gimple_seq stmts = NULL;
1523 if (is_gimple_debug (stmt)
1524 && (gimple_debug_nonbind_marker_p (stmt)
1525 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1526 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1527 return NULL;
1529 if (!is_gimple_debug (stmt)
1530 && id->param_body_adjs
1531 && id->param_body_adjs->m_dead_stmts.contains (stmt))
1532 return NULL;
1534 /* Begin by recognizing trees that we'll completely rewrite for the
1535 inlining context. Our output for these trees is completely
1536 different from our input (e.g. RETURN_EXPR is deleted and morphs
1537 into an edge). Further down, we'll handle trees that get
1538 duplicated and/or tweaked. */
1540 /* When requested, GIMPLE_RETURN should be transformed to just the
1541 contained GIMPLE_ASSIGN. The branch semantics of the return will
1542 be handled elsewhere by manipulating the CFG rather than the
1543 statement. */
1544 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1546 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1548 /* If we're returning something, just turn that into an
1549 assignment to the equivalent of the original RESULT_DECL.
1550 If RETVAL is just the result decl, the result decl has
1551 already been set (e.g. a recent "foo (&result_decl, ...)");
1552 just toss the entire GIMPLE_RETURN. Likewise for when the
1553 call doesn't want the return value. */
1554 if (retval
1555 && (TREE_CODE (retval) != RESULT_DECL
1556 && (!id->call_stmt
1557 || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1558 && (TREE_CODE (retval) != SSA_NAME
1559 || ! SSA_NAME_VAR (retval)
1560 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1562 copy = gimple_build_assign (id->do_not_unshare
1563 ? id->retvar : unshare_expr (id->retvar),
1564 retval);
1565 /* id->retvar is already substituted. Skip it on later remapping. */
1566 skip_first = true;
1568 else
1569 return NULL;
1571 else if (gimple_has_substatements (stmt))
1573 gimple_seq s1, s2;
1575 /* When cloning bodies from the C++ front end, we will be handed bodies
1576 in High GIMPLE form. Handle here all the High GIMPLE statements that
1577 have embedded statements. */
1578 switch (gimple_code (stmt))
1580 case GIMPLE_BIND:
1581 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1582 break;
1584 case GIMPLE_CATCH:
1586 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1587 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1588 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1590 break;
1592 case GIMPLE_EH_FILTER:
1593 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1594 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1595 break;
1597 case GIMPLE_TRY:
1598 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1599 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1600 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1601 break;
1603 case GIMPLE_WITH_CLEANUP_EXPR:
1604 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1605 copy = gimple_build_wce (s1);
1606 break;
1608 case GIMPLE_OMP_PARALLEL:
1610 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1611 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1612 copy = gimple_build_omp_parallel
1613 (s1,
1614 gimple_omp_parallel_clauses (omp_par_stmt),
1615 gimple_omp_parallel_child_fn (omp_par_stmt),
1616 gimple_omp_parallel_data_arg (omp_par_stmt));
1618 break;
1620 case GIMPLE_OMP_TASK:
1621 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1622 copy = gimple_build_omp_task
1623 (s1,
1624 gimple_omp_task_clauses (stmt),
1625 gimple_omp_task_child_fn (stmt),
1626 gimple_omp_task_data_arg (stmt),
1627 gimple_omp_task_copy_fn (stmt),
1628 gimple_omp_task_arg_size (stmt),
1629 gimple_omp_task_arg_align (stmt));
1630 break;
1632 case GIMPLE_OMP_FOR:
1633 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1634 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1635 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1636 gimple_omp_for_clauses (stmt),
1637 gimple_omp_for_collapse (stmt), s2);
1639 size_t i;
1640 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1642 gimple_omp_for_set_index (copy, i,
1643 gimple_omp_for_index (stmt, i));
1644 gimple_omp_for_set_initial (copy, i,
1645 gimple_omp_for_initial (stmt, i));
1646 gimple_omp_for_set_final (copy, i,
1647 gimple_omp_for_final (stmt, i));
1648 gimple_omp_for_set_incr (copy, i,
1649 gimple_omp_for_incr (stmt, i));
1650 gimple_omp_for_set_cond (copy, i,
1651 gimple_omp_for_cond (stmt, i));
1654 break;
1656 case GIMPLE_OMP_MASTER:
1657 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1658 copy = gimple_build_omp_master (s1);
1659 break;
1661 case GIMPLE_OMP_MASKED:
1662 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1663 copy = gimple_build_omp_masked
1664 (s1, gimple_omp_masked_clauses (stmt));
1665 break;
1667 case GIMPLE_OMP_SCOPE:
1668 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1669 copy = gimple_build_omp_scope
1670 (s1, gimple_omp_scope_clauses (stmt));
1671 break;
1673 case GIMPLE_OMP_TASKGROUP:
1674 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1675 copy = gimple_build_omp_taskgroup
1676 (s1, gimple_omp_taskgroup_clauses (stmt));
1677 break;
1679 case GIMPLE_OMP_ORDERED:
1680 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1681 copy = gimple_build_omp_ordered
1682 (s1,
1683 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1684 break;
1686 case GIMPLE_OMP_SCAN:
1687 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1688 copy = gimple_build_omp_scan
1689 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1690 break;
1692 case GIMPLE_OMP_SECTION:
1693 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1694 copy = gimple_build_omp_section (s1);
1695 break;
1697 case GIMPLE_OMP_SECTIONS:
1698 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1699 copy = gimple_build_omp_sections
1700 (s1, gimple_omp_sections_clauses (stmt));
1701 break;
1703 case GIMPLE_OMP_SINGLE:
1704 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1705 copy = gimple_build_omp_single
1706 (s1, gimple_omp_single_clauses (stmt));
1707 break;
1709 case GIMPLE_OMP_TARGET:
1710 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1711 copy = gimple_build_omp_target
1712 (s1, gimple_omp_target_kind (stmt),
1713 gimple_omp_target_clauses (stmt));
1714 break;
1716 case GIMPLE_OMP_TEAMS:
1717 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1718 copy = gimple_build_omp_teams
1719 (s1, gimple_omp_teams_clauses (stmt));
1720 break;
1722 case GIMPLE_OMP_CRITICAL:
1723 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1724 copy = gimple_build_omp_critical (s1,
1725 gimple_omp_critical_name
1726 (as_a <gomp_critical *> (stmt)),
1727 gimple_omp_critical_clauses
1728 (as_a <gomp_critical *> (stmt)));
1729 break;
1731 case GIMPLE_TRANSACTION:
1733 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1734 gtransaction *new_trans_stmt;
1735 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1736 id);
1737 copy = new_trans_stmt = gimple_build_transaction (s1);
1738 gimple_transaction_set_subcode (new_trans_stmt,
1739 gimple_transaction_subcode (old_trans_stmt));
1740 gimple_transaction_set_label_norm (new_trans_stmt,
1741 gimple_transaction_label_norm (old_trans_stmt));
1742 gimple_transaction_set_label_uninst (new_trans_stmt,
1743 gimple_transaction_label_uninst (old_trans_stmt));
1744 gimple_transaction_set_label_over (new_trans_stmt,
1745 gimple_transaction_label_over (old_trans_stmt));
1747 break;
1749 default:
1750 gcc_unreachable ();
1753 else
1755 if (gimple_assign_copy_p (stmt)
1756 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1757 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1759 /* Here we handle statements that are not completely rewritten.
1760 First we detect some inlining-induced bogosities for
1761 discarding. */
1763 /* Some assignments VAR = VAR; don't generate any rtl code
1764 and thus don't count as variable modification. Avoid
1765 keeping bogosities like 0 = 0. */
1766 tree decl = gimple_assign_lhs (stmt), value;
1767 tree *n;
1769 n = id->decl_map->get (decl);
1770 if (n)
1772 value = *n;
1773 STRIP_TYPE_NOPS (value);
1774 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1775 return NULL;
1779 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1780 in a block that we aren't copying during tree_function_versioning,
1781 just drop the clobber stmt. */
1782 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1784 tree lhs = gimple_assign_lhs (stmt);
1785 if (TREE_CODE (lhs) == MEM_REF
1786 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1788 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1789 if (gimple_bb (def_stmt)
1790 && !bitmap_bit_p (id->blocks_to_copy,
1791 gimple_bb (def_stmt)->index))
1792 return NULL;
1796 /* We do not allow CLOBBERs of handled components. In case the
1797 returned value is stored via such a handled component, remove
1798 the clobber so the stmt verifier is happy. */
1799 if (gimple_clobber_p (stmt)
1800 && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1802 tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1803 if (!DECL_P (remapped)
1804 && TREE_CODE (remapped) != MEM_REF)
1805 return NULL;
1808 if (gimple_debug_bind_p (stmt))
1810 tree value;
1811 if (id->param_body_adjs
1812 && id->param_body_adjs->m_dead_stmts.contains (stmt))
1813 value = NULL_TREE;
1814 else
1815 value = gimple_debug_bind_get_value (stmt);
1816 gdebug *copy
1817 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1818 value, stmt);
1819 if (id->reset_location)
1820 gimple_set_location (copy, input_location);
1821 id->debug_stmts.safe_push (copy);
1822 gimple_seq_add_stmt (&stmts, copy);
1823 return stmts;
1825 if (gimple_debug_source_bind_p (stmt))
1827 gdebug *copy = gimple_build_debug_source_bind
1828 (gimple_debug_source_bind_get_var (stmt),
1829 gimple_debug_source_bind_get_value (stmt),
1830 stmt);
1831 if (id->reset_location)
1832 gimple_set_location (copy, input_location);
1833 id->debug_stmts.safe_push (copy);
1834 gimple_seq_add_stmt (&stmts, copy);
1835 return stmts;
1837 if (gimple_debug_nonbind_marker_p (stmt))
1839 /* If the inlined function has too many debug markers,
1840 don't copy them. */
1841 if (id->src_cfun->debug_marker_count
1842 > param_max_debug_marker_count
1843 || id->reset_location)
1844 return stmts;
1846 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1847 id->debug_stmts.safe_push (copy);
1848 gimple_seq_add_stmt (&stmts, copy);
1849 return stmts;
1852 /* Create a new deep copy of the statement. */
1853 copy = gimple_copy (stmt);
1855 /* Clear flags that need revisiting. */
1856 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1858 if (gimple_call_tail_p (call_stmt))
1859 gimple_call_set_tail (call_stmt, false);
1860 if (gimple_call_from_thunk_p (call_stmt))
1861 gimple_call_set_from_thunk (call_stmt, false);
1862 if (gimple_call_internal_p (call_stmt))
1863 switch (gimple_call_internal_fn (call_stmt))
1865 case IFN_GOMP_SIMD_LANE:
1866 case IFN_GOMP_SIMD_VF:
1867 case IFN_GOMP_SIMD_LAST_LANE:
1868 case IFN_GOMP_SIMD_ORDERED_START:
1869 case IFN_GOMP_SIMD_ORDERED_END:
1870 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1871 break;
1872 default:
1873 break;
1877 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1878 RESX and EH_DISPATCH. */
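      /* A brief note (summary, not from the original sources): the region and
	 landing-pad numbers embedded in these calls and statements are
	 constants referring to the callee's EH tree; id->eh_map, filled by
	 duplicate_eh_regions, translates them to the regions duplicated into
	 the caller, so the arguments below must be rewritten accordingly.  */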
1879 if (id->eh_map)
1880 switch (gimple_code (copy))
1882 case GIMPLE_CALL:
1884 tree r, fndecl = gimple_call_fndecl (copy);
1885 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1886 switch (DECL_FUNCTION_CODE (fndecl))
1888 case BUILT_IN_EH_COPY_VALUES:
1889 r = gimple_call_arg (copy, 1);
1890 r = remap_eh_region_tree_nr (r, id);
1891 gimple_call_set_arg (copy, 1, r);
1892 /* FALLTHRU */
1894 case BUILT_IN_EH_POINTER:
1895 case BUILT_IN_EH_FILTER:
1896 r = gimple_call_arg (copy, 0);
1897 r = remap_eh_region_tree_nr (r, id);
1898 gimple_call_set_arg (copy, 0, r);
1899 break;
1901 default:
1902 break;
1905 /* Reset alias info if we didn't apply measures to
1906 keep it valid over inlining by setting DECL_PT_UID. */
1907 if (!id->src_cfun->gimple_df
1908 || !id->src_cfun->gimple_df->ipa_pta)
1909 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1911 break;
1913 case GIMPLE_RESX:
1915 gresx *resx_stmt = as_a <gresx *> (copy);
1916 int r = gimple_resx_region (resx_stmt);
1917 r = remap_eh_region_nr (r, id);
1918 gimple_resx_set_region (resx_stmt, r);
1920 break;
1922 case GIMPLE_EH_DISPATCH:
1924 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1925 int r = gimple_eh_dispatch_region (eh_dispatch);
1926 r = remap_eh_region_nr (r, id);
1927 gimple_eh_dispatch_set_region (eh_dispatch, r);
1929 break;
1931 default:
1932 break;
1936 /* If STMT has a block defined, map it to the newly constructed block. */
1937 if (tree block = gimple_block (copy))
1939 tree *n;
1940 n = id->decl_map->get (block);
1941 gcc_assert (n);
1942 gimple_set_block (copy, *n);
1944 if (id->param_body_adjs)
1946 gimple_seq extra_stmts = NULL;
1947 id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts, stmt);
1948 if (!gimple_seq_empty_p (extra_stmts))
1950 memset (&wi, 0, sizeof (wi));
1951 wi.info = id;
1952 for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1953 !gsi_end_p (egsi);
1954 gsi_next (&egsi))
1955 walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1956 gimple_seq_add_seq (&stmts, extra_stmts);
1960 if (id->reset_location)
1961 gimple_set_location (copy, input_location);
1963 /* Debug statements ought to be rebuilt and not copied. */
1964 gcc_checking_assert (!is_gimple_debug (copy));
1966 /* Remap all the operands in COPY. */
1967 memset (&wi, 0, sizeof (wi));
1968 wi.info = id;
1969 if (skip_first)
1970 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1971 else
1972 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1974 /* Clear the copied virtual operands. We are not remapping them here
1975 but are going to recreate them from scratch. */
1976 if (gimple_has_mem_ops (copy))
1978 gimple_set_vdef (copy, NULL_TREE);
1979 gimple_set_vuse (copy, NULL_TREE);
1982 if (cfun->can_throw_non_call_exceptions)
1984 /* When inlining a function which does not have non-call exceptions
1985 enabled into a function that has (which only happens with
1986 always-inline) we have to fixup stmts that cannot throw. */
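	  /* Illustrative sketch (not from the original sources): a copied
	     condition such as
		 if (x_1 < y_2) goto bb3; else goto bb4;
	     whose comparison may trap under -fnon-call-exceptions is turned
	     into
		 _t = x_1 < y_2;
		 if (_t != 0) goto bb3; else goto bb4;
	     so the potentially trapping compare becomes a separate statement
	     that can receive its own EH edges.  */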
1987 if (gcond *cond = dyn_cast <gcond *> (copy))
1988 if (gimple_could_trap_p (cond))
1990 gassign *cmp
1991 = gimple_build_assign (make_ssa_name (boolean_type_node),
1992 gimple_cond_code (cond),
1993 gimple_cond_lhs (cond),
1994 gimple_cond_rhs (cond));
1995 gimple_seq_add_stmt (&stmts, cmp);
1996 gimple_cond_set_code (cond, NE_EXPR);
1997 gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
1998 gimple_cond_set_rhs (cond, boolean_false_node);
2002 gimple_seq_add_stmt (&stmts, copy);
2003 return stmts;
2007 /* Copy basic block, scale profile accordingly. Edges will be taken care of
2008 later. */
2010 static basic_block
2011 copy_bb (copy_body_data *id, basic_block bb,
2012 profile_count num, profile_count den)
2014 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
2015 basic_block copy_basic_block;
2016 tree decl;
2017 basic_block prev;
2019 profile_count::adjust_for_ipa_scaling (&num, &den);
2021 /* Search for previous copied basic block. */
2022 prev = bb->prev_bb;
2023 while (!prev->aux)
2024 prev = prev->prev_bb;
2026 /* create_basic_block() will append every new block to
2027 basic_block_info automatically. */
2028 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
2029 copy_basic_block->count = bb->count.apply_scale (num, den);
2031 copy_gsi = gsi_start_bb (copy_basic_block);
2033 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2035 gimple_seq stmts;
2036 gimple *stmt = gsi_stmt (gsi);
2037 gimple *orig_stmt = stmt;
2038 gimple_stmt_iterator stmts_gsi;
2039 bool stmt_added = false;
2041 id->regimplify = false;
2042 stmts = remap_gimple_stmt (stmt, id);
2044 if (gimple_seq_empty_p (stmts))
2045 continue;
2047 seq_gsi = copy_gsi;
2049 for (stmts_gsi = gsi_start (stmts);
2050 !gsi_end_p (stmts_gsi); )
2052 stmt = gsi_stmt (stmts_gsi);
2054 /* Advance iterator now before stmt is moved to seq_gsi. */
2055 gsi_next (&stmts_gsi);
2057 if (gimple_nop_p (stmt))
2058 continue;
2060 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2061 orig_stmt);
2063 /* With return slot optimization we can end up with
2064 non-gimple (foo *)&this->m, fix that here. */
2065 if (is_gimple_assign (stmt)
2066 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
2067 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2069 tree new_rhs;
2070 new_rhs = force_gimple_operand_gsi (&seq_gsi,
2071 gimple_assign_rhs1 (stmt),
2072 true, NULL, false,
2073 GSI_CONTINUE_LINKING);
2074 gimple_assign_set_rhs1 (stmt, new_rhs);
2075 id->regimplify = false;
2078 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2080 if (id->regimplify)
2081 gimple_regimplify_operands (stmt, &seq_gsi);
2083 stmt_added = true;
2086 if (!stmt_added)
2087 continue;
2089 /* If copy_basic_block has been empty at the start of this iteration,
2090 call gsi_start_bb again to get at the newly added statements. */
2091 if (gsi_end_p (copy_gsi))
2092 copy_gsi = gsi_start_bb (copy_basic_block);
2093 else
2094 gsi_next (&copy_gsi);
2096 /* Process the new statement. The call to gimple_regimplify_operands
2097 possibly turned the statement into multiple statements; we
2098 need to process all of them. */
2101 tree fn;
2102 gcall *call_stmt;
2104 stmt = gsi_stmt (copy_gsi);
2105 call_stmt = dyn_cast <gcall *> (stmt);
2106 if (call_stmt
2107 && gimple_call_va_arg_pack_p (call_stmt)
2108 && id->call_stmt
2109 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2111 /* __builtin_va_arg_pack () should be replaced by
2112 all arguments corresponding to ... in the caller. */
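	      /* Worked example with hypothetical names: if the caller does
		 foo (1, 2, 3) and foo has a single named parameter, then a
		 call bar (x, __builtin_va_arg_pack ()) inside foo is rebuilt
		 here as bar (x, 2, 3): the callee call's explicit arguments
		 followed by the caller's anonymous arguments.  */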
2113 tree p;
2114 gcall *new_call;
2115 vec<tree> argarray;
2116 size_t nargs_caller = gimple_call_num_args (id->call_stmt);
2117 size_t nargs = nargs_caller;
2119 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2121 /* Avoid crashing on invalid IL that doesn't have a
2122 varargs function or that does not pass enough arguments. */
2123 if (nargs == 0)
2124 break;
2125 nargs--;
2128 /* Create the new array of arguments. */
2129 size_t nargs_callee = gimple_call_num_args (call_stmt);
2130 size_t n = nargs + nargs_callee;
2131 argarray.create (n);
2132 argarray.safe_grow_cleared (n, true);
2134 /* Copy all the arguments before '...' */
2135 if (nargs_callee)
2136 memcpy (argarray.address (),
2137 gimple_call_arg_ptr (call_stmt, 0),
2138 nargs_callee * sizeof (tree));
2140 /* Append the arguments passed in '...' */
2141 if (nargs)
2142 memcpy (argarray.address () + nargs_callee,
2143 gimple_call_arg_ptr (id->call_stmt, 0)
2144 + (nargs_caller - nargs), nargs * sizeof (tree));
2146 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2147 argarray);
2149 argarray.release ();
2151 /* Copy all GIMPLE_CALL flags, location and block, except
2152 GF_CALL_VA_ARG_PACK. */
2153 gimple_call_copy_flags (new_call, call_stmt);
2154 gimple_call_set_va_arg_pack (new_call, false);
2155 gimple_call_set_fntype (new_call, gimple_call_fntype (call_stmt));
2156 /* location includes block. */
2157 gimple_set_location (new_call, gimple_location (stmt));
2158 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2160 gsi_replace (&copy_gsi, new_call, false);
2161 stmt = new_call;
2163 else if (call_stmt
2164 && id->call_stmt
2165 && (decl = gimple_call_fndecl (stmt))
2166 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2168 /* __builtin_va_arg_pack_len () should be replaced by
2169 the number of anonymous arguments. */
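	      /* Hypothetical example: with a caller call foo (1, 2, 3) and a
		 single named parameter in foo, __builtin_va_arg_pack_len ()
		 evaluates to 2.  If the caller itself uses
		 __builtin_va_arg_pack (), the known count is instead added to
		 the result of the remaining __builtin_va_arg_pack_len ()
		 call below.  */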
2170 size_t nargs = gimple_call_num_args (id->call_stmt);
2171 tree count, p;
2172 gimple *new_stmt;
2174 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2175 nargs--;
2177 if (!gimple_call_lhs (stmt))
2179 /* Drop unused calls. */
2180 gsi_remove (&copy_gsi, false);
2181 continue;
2183 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2185 count = build_int_cst (integer_type_node, nargs);
2186 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2187 gsi_replace (&copy_gsi, new_stmt, false);
2188 stmt = new_stmt;
2190 else if (nargs != 0)
2192 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2193 count = build_int_cst (integer_type_node, nargs);
2194 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2195 PLUS_EXPR, newlhs, count);
2196 gimple_call_set_lhs (stmt, newlhs);
2197 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2200 else if (call_stmt
2201 && id->call_stmt
2202 && gimple_call_internal_p (stmt)
2203 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2205 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2206 gsi_remove (&copy_gsi, false);
2207 continue;
2210 /* Statements produced by inlining can be unfolded, especially
2211 when we constant propagated some operands. We can't fold
2212 them right now for two reasons:
2213 1) folding requires SSA_NAME_DEF_STMTs to be correct
2214 2) we can't change function calls to builtins.
2215 So we just mark the statement for later folding. We mark
2216 all new statements, instead of just statements that have changed
2217 by some nontrivial substitution, so even statements made
2218 foldable indirectly are updated. If this turns out to be
2219 expensive, copy_body can be told to watch for nontrivial
2220 changes. */
2221 if (id->statements_to_fold)
2222 id->statements_to_fold->add (stmt);
2224 /* We're duplicating a CALL_EXPR. Find any corresponding
2225 callgraph edges and update or duplicate them. */
2226 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2228 struct cgraph_edge *edge;
2230 switch (id->transform_call_graph_edges)
2232 case CB_CGE_DUPLICATE:
2233 edge = id->src_node->get_edge (orig_stmt);
2234 if (edge)
2236 struct cgraph_edge *old_edge = edge;
2238 /* A speculative call consists of multiple
2239 edges - an indirect edge and one or more direct edges.
2240 Duplicate the whole thing and distribute frequencies
2241 accordingly. */
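		  /* In other words (a summary of the code below): every cloned
		     direct edge gets
		       count = copy_basic_block->count
			       * e->count / (direct_cnt + indir_cnt)
		     and the cloned indirect edge gets the remaining share, so
		     the original distribution between speculative targets and
		     the indirect fallback is preserved in the copy.  */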
2242 if (edge->speculative)
2244 int n = 0;
2245 profile_count direct_cnt
2246 = profile_count::zero ();
2248 /* First figure out the distribution of counts
2249 so we can re-scale BB profile accordingly. */
2250 for (cgraph_edge *e = old_edge; e;
2251 e = e->next_speculative_call_target ())
2252 direct_cnt = direct_cnt + e->count;
2254 cgraph_edge *indirect
2255 = old_edge->speculative_call_indirect_edge ();
2256 profile_count indir_cnt = indirect->count;
2258 /* Next iterate over all direct edges, clone each one and its
2259 corresponding reference, and update the profile. */
2260 for (cgraph_edge *e = old_edge;
2262 e = e->next_speculative_call_target ())
2264 profile_count cnt = e->count;
2266 id->dst_node->clone_reference
2267 (e->speculative_call_target_ref (), stmt);
2268 edge = e->clone (id->dst_node, call_stmt,
2269 gimple_uid (stmt), num, den,
2270 true);
2271 profile_probability prob
2272 = cnt.probability_in (direct_cnt
2273 + indir_cnt);
2274 edge->count
2275 = copy_basic_block->count.apply_probability
2276 (prob);
2277 n++;
2279 gcc_checking_assert
2280 (indirect->num_speculative_call_targets_p ()
2281 == n);
2283 /* Duplicate the indirect edge after all direct edges
2284 have been cloned. */
2285 indirect = indirect->clone (id->dst_node, call_stmt,
2286 gimple_uid (stmt),
2287 num, den,
2288 true);
2290 profile_probability prob
2291 = indir_cnt.probability_in (direct_cnt
2292 + indir_cnt);
2293 indirect->count
2294 = copy_basic_block->count.apply_probability (prob);
2296 else
2298 edge = edge->clone (id->dst_node, call_stmt,
2299 gimple_uid (stmt),
2300 num, den,
2301 true);
2302 edge->count = copy_basic_block->count;
2305 break;
2307 case CB_CGE_MOVE_CLONES:
2308 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2309 call_stmt);
2310 edge = id->dst_node->get_edge (stmt);
2311 break;
2313 case CB_CGE_MOVE:
2314 edge = id->dst_node->get_edge (orig_stmt);
2315 if (edge)
2316 edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2317 break;
2319 default:
2320 gcc_unreachable ();
2323 /* Constant propagation on arguments done during inlining
2324 may create a new direct call. Produce an edge for it. */
2325 if ((!edge
2326 || (edge->indirect_inlining_edge
2327 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2328 && id->dst_node->definition
2329 && (fn = gimple_call_fndecl (stmt)) != NULL)
2331 struct cgraph_node *dest = cgraph_node::get_create (fn);
2333 /* We have a missing edge in the callgraph. This can happen
2334 when previous inlining turned an indirect call into a
2335 direct call by constant propagating arguments or when we are
2336 producing a dead clone (for further cloning). In all
2337 other cases we hit a bug (incorrect node sharing is the
2338 most common reason for missing edges). */
2339 gcc_assert (!dest->definition
2340 || dest->address_taken
2341 || !id->src_node->definition
2342 || !id->dst_node->definition);
2343 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2344 id->dst_node->create_edge_including_clones
2345 (dest, orig_stmt, call_stmt, bb->count,
2346 CIF_ORIGINALLY_INDIRECT_CALL);
2347 else
2348 id->dst_node->create_edge (dest, call_stmt,
2349 bb->count)->inline_failed
2350 = CIF_ORIGINALLY_INDIRECT_CALL;
2351 if (dump_file)
2353 fprintf (dump_file, "Created new direct edge to %s\n",
2354 dest->dump_name ());
2358 notice_special_calls (as_a <gcall *> (stmt));
2361 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2362 id->eh_map, id->eh_lp_nr);
2364 gsi_next (&copy_gsi);
2366 while (!gsi_end_p (copy_gsi));
2368 copy_gsi = gsi_last_bb (copy_basic_block);
2371 return copy_basic_block;
2374 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2375 form is quite easy, since the dominator relationship for the old basic blocks
2376 does not change.
2378 There is however an exception where inlining might change the dominator
2379 relation across EH edges from basic blocks within the inlined function
2380 to landing pads in the function we inline into.
2382 The function fills in PHI_RESULTs of such PHI nodes if they refer
2383 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2384 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2385 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2386 set, and this means that there will be no overlapping live ranges
2387 for the underlying symbol.
2389 This might change in the future if we allow redirecting of EH edges and
2390 we might then want to change the way we build the CFG pre-inlining to
2391 include all the possible edges. */
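/* A minimal sketch of what the function below does: for each successor edge
   E of BB whose destination lies outside the copied region, it finds the
   corresponding edge RE from RET_BB to the same destination and copies RE's
   PHI argument to be E's PHI argument, keeping the PHIs in the caller's
   landing pads consistent.  */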
2392 static void
2393 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2394 bool can_throw, bool nonlocal_goto)
2396 edge e;
2397 edge_iterator ei;
2399 FOR_EACH_EDGE (e, ei, bb->succs)
2400 if (!e->dest->aux
2401 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2403 gphi *phi;
2404 gphi_iterator si;
2406 if (!nonlocal_goto)
2407 gcc_assert (e->flags & EDGE_EH);
2409 if (!can_throw)
2410 gcc_assert (!(e->flags & EDGE_EH));
2412 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2414 edge re;
2416 phi = si.phi ();
2418 /* For abnormal goto/call edges the receiver can be the
2419 ENTRY_BLOCK. Do not assert this cannot happen. */
2421 gcc_assert ((e->flags & EDGE_EH)
2422 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2424 re = find_edge (ret_bb, e->dest);
2425 gcc_checking_assert (re);
2426 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2427 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2429 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2430 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2435 /* Insert clobbers for automatic variables of inlined ID->src_fn
2436 function at the start of basic block ID->eh_landing_pad_dest. */
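/* Sketch of the intent (not stated in the original sources): once an
   exception leaves the inlined body, the inlined function's local variables
   are dead in the caller's landing pad, so emitting VAR = {CLOBBER} there
   lets later passes (stack-slot sharing, dead store elimination) treat them
   as such; the live-variable computation below restricts the clobbers to
   variables that are actually live in some block throwing into this
   landing pad.  */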
2438 static void
2439 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2441 tree var;
2442 basic_block bb = id->eh_landing_pad_dest;
2443 live_vars_map *vars = NULL;
2444 unsigned int cnt = 0;
2445 unsigned int i;
2446 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2447 if (VAR_P (var)
2448 && !DECL_HARD_REGISTER (var)
2449 && !TREE_THIS_VOLATILE (var)
2450 && !DECL_HAS_VALUE_EXPR_P (var)
2451 && !is_gimple_reg (var)
2452 && auto_var_in_fn_p (var, id->src_fn)
2453 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2455 tree *t = id->decl_map->get (var);
2456 if (!t)
2457 continue;
2458 tree new_var = *t;
2459 if (VAR_P (new_var)
2460 && !DECL_HARD_REGISTER (new_var)
2461 && !TREE_THIS_VOLATILE (new_var)
2462 && !DECL_HAS_VALUE_EXPR_P (new_var)
2463 && !is_gimple_reg (new_var)
2464 && auto_var_in_fn_p (new_var, id->dst_fn))
2466 if (vars == NULL)
2467 vars = new live_vars_map;
2468 vars->put (DECL_UID (var), cnt++);
2471 if (vars == NULL)
2472 return;
2474 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2475 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2476 if (VAR_P (var))
2478 edge e;
2479 edge_iterator ei;
2480 bool needed = false;
2481 unsigned int *v = vars->get (DECL_UID (var));
2482 if (v == NULL)
2483 continue;
2484 FOR_EACH_EDGE (e, ei, bb->preds)
2485 if ((e->flags & EDGE_EH) != 0
2486 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2488 basic_block src_bb = (basic_block) e->src->aux;
2490 if (bitmap_bit_p (&live[src_bb->index], *v))
2492 needed = true;
2493 break;
2496 if (needed)
2498 tree new_var = *id->decl_map->get (var);
2499 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2500 tree clobber = build_clobber (TREE_TYPE (new_var));
2501 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2502 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2505 destroy_live_vars (live);
2506 delete vars;
2509 /* Copy edges from BB into its copy constructed earlier, scale profile
2510 accordingly. Edges will be taken care of later. Assume that aux
2511 pointers point to the copies of each BB. Return true if any
2512 debug stmts are left after a statement that must end the basic block. */
2514 static bool
2515 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2516 basic_block ret_bb, basic_block abnormal_goto_dest,
2517 copy_body_data *id)
2519 basic_block new_bb = (basic_block) bb->aux;
2520 edge_iterator ei;
2521 edge old_edge;
2522 gimple_stmt_iterator si;
2523 bool need_debug_cleanup = false;
2525 /* Use the indices from the original blocks to create edges for the
2526 new ones. */
2527 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2528 if (!(old_edge->flags & EDGE_EH))
2530 edge new_edge;
2531 int flags = old_edge->flags;
2532 location_t locus = old_edge->goto_locus;
2534 /* Return edges do get a FALLTHRU flag when they get inlined. */
2535 if (old_edge->dest->index == EXIT_BLOCK
2536 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2537 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2538 flags |= EDGE_FALLTHRU;
2540 new_edge
2541 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2542 new_edge->probability = old_edge->probability;
2543 if (!id->reset_location)
2544 new_edge->goto_locus = remap_location (locus, id);
2547 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2548 return false;
2550 /* When doing function splitting, we must decrease the count of the return
2551 block which was previously reachable from a block we did not copy. */
2552 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2553 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2554 if (old_edge->src->index != ENTRY_BLOCK
2555 && !old_edge->src->aux)
2556 new_bb->count -= old_edge->count ().apply_scale (num, den);
2558 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2560 gimple *copy_stmt;
2561 bool can_throw, nonlocal_goto;
2563 copy_stmt = gsi_stmt (si);
2564 if (!is_gimple_debug (copy_stmt))
2565 update_stmt (copy_stmt);
2567 /* Do this before the possible split_block. */
2568 gsi_next (&si);
2570 /* If this tree could throw an exception, there are two
2571 cases where we need to add abnormal edge(s): the
2572 tree wasn't in a region and there is a "current
2573 region" in the caller; or the original tree had
2574 EH edges. In both cases split the block after the tree,
2575 and add abnormal edge(s) as needed; we need both
2576 those from the callee and the caller.
2577 We check whether the copy can throw, because the const
2578 propagation can change an INDIRECT_REF which throws
2579 into a COMPONENT_REF which doesn't. If the copy
2580 can throw, the original could also throw. */
2581 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2582 nonlocal_goto
2583 = (stmt_can_make_abnormal_goto (copy_stmt)
2584 && !computed_goto_p (copy_stmt));
2586 if (can_throw || nonlocal_goto)
2588 if (!gsi_end_p (si))
2590 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2591 gsi_next (&si);
2592 if (gsi_end_p (si))
2593 need_debug_cleanup = true;
2595 if (!gsi_end_p (si))
2596 /* Note that bb's predecessor edges aren't necessarily
2597 right at this point; split_block doesn't care. */
2599 edge e = split_block (new_bb, copy_stmt);
2601 new_bb = e->dest;
2602 new_bb->aux = e->src->aux;
2603 si = gsi_start_bb (new_bb);
2607 bool update_probs = false;
2609 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2611 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2612 update_probs = true;
2614 else if (can_throw)
2616 make_eh_edges (copy_stmt);
2617 update_probs = true;
2620 /* EH edges may not match old edges. Copy as much as possible. */
2621 if (update_probs)
2623 edge e;
2624 edge_iterator ei;
2625 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2627 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2628 if ((old_edge->flags & EDGE_EH)
2629 && (e = find_edge (copy_stmt_bb,
2630 (basic_block) old_edge->dest->aux))
2631 && (e->flags & EDGE_EH))
2632 e->probability = old_edge->probability;
2634 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2635 if (e->flags & EDGE_EH)
2637 if (!e->probability.initialized_p ())
2638 e->probability = profile_probability::never ();
2639 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2641 if (id->eh_landing_pad_dest == NULL)
2642 id->eh_landing_pad_dest = e->dest;
2643 else
2644 gcc_assert (id->eh_landing_pad_dest == e->dest);
2650 /* If the call we inline cannot make an abnormal goto, do not add
2651 additional abnormal edges but only retain those already present
2652 in the original function body. */
2653 if (abnormal_goto_dest == NULL)
2654 nonlocal_goto = false;
2655 if (nonlocal_goto)
2657 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2659 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2660 nonlocal_goto = false;
2661 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2662 in OpenMP regions which aren't allowed to be left abnormally.
2663 So, no need to add abnormal edge in that case. */
2664 else if (is_gimple_call (copy_stmt)
2665 && gimple_call_internal_p (copy_stmt)
2666 && (gimple_call_internal_fn (copy_stmt)
2667 == IFN_ABNORMAL_DISPATCHER)
2668 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2669 nonlocal_goto = false;
2670 else
2671 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2672 EDGE_ABNORMAL);
2675 if ((can_throw || nonlocal_goto)
2676 && gimple_in_ssa_p (cfun))
2677 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2678 can_throw, nonlocal_goto);
2680 return need_debug_cleanup;
2683 /* Copy the PHIs. All blocks and edges are copied, some blocks
2684 were possibly split and new outgoing EH edges inserted.
2685 BB points to the block of the original function and AUX pointers link
2686 the original and newly copied blocks. */
2688 static void
2689 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2691 basic_block const new_bb = (basic_block) bb->aux;
2692 edge_iterator ei;
2693 gphi *phi;
2694 gphi_iterator si;
2695 edge new_edge;
2696 bool inserted = false;
2698 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2700 tree res, new_res;
2701 gphi *new_phi;
2703 phi = si.phi ();
2704 res = PHI_RESULT (phi);
2705 new_res = res;
2706 if (!virtual_operand_p (res)
2707 && (!id->param_body_adjs
2708 || !id->param_body_adjs->m_dead_stmts.contains (phi)))
2710 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2711 if (EDGE_COUNT (new_bb->preds) == 0)
2713 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2714 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2716 else
2718 new_phi = create_phi_node (new_res, new_bb);
2719 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2721 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2722 bb);
2723 tree arg;
2724 tree new_arg;
2725 edge_iterator ei2;
2726 location_t locus;
2728 /* When doing partial cloning, we allow PHIs on the entry
2729 block as long as all the arguments are the same.
2730 Find any input edge to see which argument to copy. */
2731 if (!old_edge)
2732 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2733 if (!old_edge->src->aux)
2734 break;
2736 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2737 new_arg = arg;
2738 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2739 gcc_assert (new_arg);
2740 /* With return slot optimization we can end up with
2741 non-gimple (foo *)&this->m, fix that here. */
2742 if (TREE_CODE (new_arg) != SSA_NAME
2743 && TREE_CODE (new_arg) != FUNCTION_DECL
2744 && !is_gimple_val (new_arg))
2746 gimple_seq stmts = NULL;
2747 new_arg = force_gimple_operand (new_arg, &stmts, true,
2748 NULL);
2749 gsi_insert_seq_on_edge (new_edge, stmts);
2750 inserted = true;
2752 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2753 if (id->reset_location)
2754 locus = input_location;
2755 else
2756 locus = remap_location (locus, id);
2757 add_phi_arg (new_phi, new_arg, new_edge, locus);
2763 /* Commit the delayed edge insertions. */
2764 if (inserted)
2765 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2766 gsi_commit_one_edge_insert (new_edge, NULL);
2770 /* Wrapper for remap_decl so it can be used as a callback. */
2772 static tree
2773 remap_decl_1 (tree decl, void *data)
2775 return remap_decl (decl, (copy_body_data *) data);
2778 /* Build the struct function and associated data structures for the new clone
2779 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2780 cfun to the function of new_fndecl (and current_function_decl too). */
2782 static void
2783 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2785 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2787 if (!DECL_ARGUMENTS (new_fndecl))
2788 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2789 if (!DECL_RESULT (new_fndecl))
2790 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2792 /* Register specific tree functions. */
2793 gimple_register_cfg_hooks ();
2795 /* Get clean struct function. */
2796 push_struct_function (new_fndecl);
2798 /* We will rebuild these, so just sanity check that they are empty. */
2799 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2800 gcc_assert (cfun->local_decls == NULL);
2801 gcc_assert (cfun->cfg == NULL);
2802 gcc_assert (cfun->decl == new_fndecl);
2804 /* Copy items we preserve during cloning. */
2805 cfun->static_chain_decl = src_cfun->static_chain_decl;
2806 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2807 cfun->function_end_locus = src_cfun->function_end_locus;
2808 cfun->curr_properties = src_cfun->curr_properties;
2809 cfun->last_verified = src_cfun->last_verified;
2810 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2811 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2812 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2813 cfun->calls_eh_return = src_cfun->calls_eh_return;
2814 cfun->stdarg = src_cfun->stdarg;
2815 cfun->after_inlining = src_cfun->after_inlining;
2816 cfun->can_throw_non_call_exceptions
2817 = src_cfun->can_throw_non_call_exceptions;
2818 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2819 cfun->returns_struct = src_cfun->returns_struct;
2820 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2822 init_empty_tree_cfg ();
2824 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2826 profile_count num = count;
2827 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2828 profile_count::adjust_for_ipa_scaling (&num, &den);
2830 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2831 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2832 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2833 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2834 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2835 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2836 if (src_cfun->eh)
2837 init_eh_for_function ();
2839 if (src_cfun->gimple_df)
2841 init_tree_ssa (cfun);
2842 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2843 if (cfun->gimple_df->in_ssa_p)
2844 init_ssa_operands (cfun);
2848 /* Helper function for copy_cfg_body. Move debug stmts from the end
2849 of NEW_BB to the beginning of successor basic blocks when needed. If the
2850 successor has multiple predecessors, reset them, otherwise keep
2851 their value. */
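/* A short rationale (a sketch): copy_edges_for_bb does not split a block
   when only debug stmts follow a statement that can throw or make an
   abnormal goto, so those debug stmts end up after what is now a
   block-ending statement.  They are moved to the last successor and copied
   to the remaining ones; when a successor has several predecessors the
   bound value would be ambiguous, so the bind is reset there instead.  */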
2853 static void
2854 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2856 edge e;
2857 edge_iterator ei;
2858 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2860 if (gsi_end_p (si)
2861 || gsi_one_before_end_p (si)
2862 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2863 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2864 return;
2866 FOR_EACH_EDGE (e, ei, new_bb->succs)
2868 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2869 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2870 while (is_gimple_debug (gsi_stmt (ssi)))
2872 gimple *stmt = gsi_stmt (ssi);
2873 gdebug *new_stmt;
2874 tree var;
2875 tree value;
2877 /* For the last edge move the debug stmts instead of copying
2878 them. */
2879 if (ei_one_before_end_p (ei))
2881 si = ssi;
2882 gsi_prev (&ssi);
2883 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2885 gimple_debug_bind_reset_value (stmt);
2886 gimple_set_location (stmt, UNKNOWN_LOCATION);
2888 gsi_remove (&si, false);
2889 gsi_insert_before (&dsi, stmt, GSI_NEW_STMT);
2890 continue;
2893 if (gimple_debug_bind_p (stmt))
2895 var = gimple_debug_bind_get_var (stmt);
2896 if (single_pred_p (e->dest))
2898 value = gimple_debug_bind_get_value (stmt);
2899 value = unshare_expr (value);
2900 new_stmt = gimple_build_debug_bind (var, value, stmt);
2902 else
2903 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2905 else if (gimple_debug_source_bind_p (stmt))
2907 var = gimple_debug_source_bind_get_var (stmt);
2908 value = gimple_debug_source_bind_get_value (stmt);
2909 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2911 else if (gimple_debug_nonbind_marker_p (stmt))
2912 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2913 else
2914 gcc_unreachable ();
2915 gsi_insert_before (&dsi, new_stmt, GSI_NEW_STMT);
2916 id->debug_stmts.safe_push (new_stmt);
2917 gsi_prev (&ssi);
2922 /* Make a copy of the sub-loops of SRC_PARENT and place them
2923 as sub-loops of DEST_PARENT. */
2925 static void
2926 copy_loops (copy_body_data *id,
2927 class loop *dest_parent, class loop *src_parent)
2929 class loop *src_loop = src_parent->inner;
2930 while (src_loop)
2932 if (!id->blocks_to_copy
2933 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2935 class loop *dest_loop = alloc_loop ();
2937 /* Assign the new loop its header and latch and associate
2938 those with the new loop. */
2939 dest_loop->header = (basic_block)src_loop->header->aux;
2940 dest_loop->header->loop_father = dest_loop;
2941 if (src_loop->latch != NULL)
2943 dest_loop->latch = (basic_block)src_loop->latch->aux;
2944 dest_loop->latch->loop_father = dest_loop;
2947 /* Copy loop meta-data. */
2948 copy_loop_info (src_loop, dest_loop);
2949 if (dest_loop->unroll)
2950 cfun->has_unroll = true;
2951 if (dest_loop->force_vectorize)
2952 cfun->has_force_vectorize_loops = true;
2953 if (id->src_cfun->last_clique != 0)
2954 dest_loop->owned_clique
2955 = remap_dependence_clique (id,
2956 src_loop->owned_clique
2957 ? src_loop->owned_clique : 1);
2959 /* Finally place it into the loop array and the loop tree. */
2960 place_new_loop (cfun, dest_loop);
2961 flow_loop_tree_node_add (dest_parent, dest_loop);
2963 if (src_loop->simduid)
2965 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2966 cfun->has_simduid_loops = true;
2969 /* Recurse. */
2970 copy_loops (id, dest_loop, src_loop);
2972 src_loop = src_loop->next;
2976 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2978 void
2979 redirect_all_calls (copy_body_data * id, basic_block bb)
2981 gimple_stmt_iterator si;
2982 gimple *last = last_stmt (bb);
2983 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2985 gimple *stmt = gsi_stmt (si);
2986 if (is_gimple_call (stmt))
2988 tree old_lhs = gimple_call_lhs (stmt);
2989 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2990 if (edge)
2992 gimple *new_stmt
2993 = cgraph_edge::redirect_call_stmt_to_callee (edge);
2994 /* If the IPA-SRA transformation, run as part of edge redirection,
2995 removed the LHS because it is unused, save it to
2996 killed_new_ssa_names so that we can prune it from debug
2997 statements. */
2998 if (old_lhs
2999 && TREE_CODE (old_lhs) == SSA_NAME
3000 && !gimple_call_lhs (new_stmt))
3002 if (!id->killed_new_ssa_names)
3003 id->killed_new_ssa_names = new hash_set<tree> (16);
3004 id->killed_new_ssa_names->add (old_lhs);
3007 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
3008 gimple_purge_dead_eh_edges (bb);
3014 /* Make a copy of the body of FN so that it can be inserted inline in
3015 another function. Walks FN via CFG, returns new fndecl. */
3017 static tree
3018 copy_cfg_body (copy_body_data * id,
3019 basic_block entry_block_map, basic_block exit_block_map,
3020 basic_block new_entry)
3022 tree callee_fndecl = id->src_fn;
3023 /* Original cfun for the callee, doesn't change. */
3024 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3025 struct function *cfun_to_copy;
3026 basic_block bb;
3027 tree new_fndecl = NULL;
3028 bool need_debug_cleanup = false;
3029 int last;
3030 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
3031 profile_count num = entry_block_map->count;
3033 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3035 /* Register specific tree functions. */
3036 gimple_register_cfg_hooks ();
3038 /* If we are inlining just a region of the function, make sure to connect
3039 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
3040 part of a loop, we must compute the frequency and probability of
3041 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
3042 probabilities of edges incoming from the nonduplicated region. */
3043 if (new_entry)
3045 edge e;
3046 edge_iterator ei;
3047 den = profile_count::zero ();
3049 FOR_EACH_EDGE (e, ei, new_entry->preds)
3050 if (!e->src->aux)
3051 den += e->count ();
3052 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
3055 profile_count::adjust_for_ipa_scaling (&num, &den);
3057 /* Must have a CFG here at this point. */
3058 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
3059 (DECL_STRUCT_FUNCTION (callee_fndecl)));
3062 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
3063 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3064 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3065 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3067 /* Duplicate any exception-handling regions. */
3068 if (cfun->eh)
3069 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3070 remap_decl_1, id);
3072 /* Use aux pointers to map the original blocks to their copies. */
3073 FOR_EACH_BB_FN (bb, cfun_to_copy)
3074 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3076 basic_block new_bb = copy_bb (id, bb, num, den);
3077 bb->aux = new_bb;
3078 new_bb->aux = bb;
3079 new_bb->loop_father = entry_block_map->loop_father;
3082 last = last_basic_block_for_fn (cfun);
3084 /* Now that we've duplicated the blocks, duplicate their edges. */
3085 basic_block abnormal_goto_dest = NULL;
3086 if (id->call_stmt
3087 && stmt_can_make_abnormal_goto (id->call_stmt))
3089 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3091 bb = gimple_bb (id->call_stmt);
3092 gsi_next (&gsi);
3093 if (gsi_end_p (gsi))
3094 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3096 FOR_ALL_BB_FN (bb, cfun_to_copy)
3097 if (!id->blocks_to_copy
3098 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3099 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3100 abnormal_goto_dest, id);
3102 if (id->eh_landing_pad_dest)
3104 add_clobbers_to_eh_landing_pad (id);
3105 id->eh_landing_pad_dest = NULL;
3108 if (new_entry)
3110 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3111 EDGE_FALLTHRU);
3112 e->probability = profile_probability::always ();
3115 /* Duplicate the loop tree, if available and wanted. */
3116 if (loops_for_fn (src_cfun) != NULL
3117 && current_loops != NULL)
3119 copy_loops (id, entry_block_map->loop_father,
3120 get_loop (src_cfun, 0));
3121 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
3122 loops_state_set (LOOPS_NEED_FIXUP);
3125 /* If the loop tree in the source function needed fixup, mark the
3126 destination loop tree for fixup, too. */
3127 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3128 loops_state_set (LOOPS_NEED_FIXUP);
3130 if (gimple_in_ssa_p (cfun))
3131 FOR_ALL_BB_FN (bb, cfun_to_copy)
3132 if (!id->blocks_to_copy
3133 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3134 copy_phis_for_bb (bb, id);
3136 FOR_ALL_BB_FN (bb, cfun_to_copy)
3137 if (bb->aux)
3139 if (need_debug_cleanup
3140 && bb->index != ENTRY_BLOCK
3141 && bb->index != EXIT_BLOCK)
3142 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3143 /* Update call edge destinations. This cannot be done before loop
3144 info is updated, because we may split basic blocks. */
3145 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3146 && bb->index != ENTRY_BLOCK
3147 && bb->index != EXIT_BLOCK)
3148 redirect_all_calls (id, (basic_block)bb->aux);
3149 ((basic_block)bb->aux)->aux = NULL;
3150 bb->aux = NULL;
3153 /* Zero out AUX fields of newly created block during EH edge
3154 insertion. */
3155 for (; last < last_basic_block_for_fn (cfun); last++)
3157 if (need_debug_cleanup)
3158 maybe_move_debug_stmts_to_successors (id,
3159 BASIC_BLOCK_FOR_FN (cfun, last));
3160 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3161 /* Update call edge destinations. This cannot be done before loop
3162 info is updated, because we may split basic blocks. */
3163 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3164 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3166 entry_block_map->aux = NULL;
3167 exit_block_map->aux = NULL;
3169 if (id->eh_map)
3171 delete id->eh_map;
3172 id->eh_map = NULL;
3174 if (id->dependence_map)
3176 delete id->dependence_map;
3177 id->dependence_map = NULL;
3180 return new_fndecl;
3183 /* Copy the debug STMT using ID. We deal with these statements in a
3184 special way: if any variable in their VALUE expression wasn't
3185 remapped yet, we won't remap it, because that would get decl uids
3186 out of sync, causing codegen differences between -g and -g0. If
3187 this arises, we drop the VALUE expression altogether. */
3189 static void
3190 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3192 tree t, *n;
3193 struct walk_stmt_info wi;
3195 if (tree block = gimple_block (stmt))
3197 n = id->decl_map->get (block);
3198 gimple_set_block (stmt, n ? *n : id->block);
3201 if (gimple_debug_nonbind_marker_p (stmt))
3203 if (id->call_stmt && !gimple_block (stmt))
3205 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3206 gsi_remove (&gsi, true);
3208 return;
3211 /* Remap all the operands in COPY. */
3212 memset (&wi, 0, sizeof (wi));
3213 wi.info = id;
3215 processing_debug_stmt = 1;
3217 if (gimple_debug_source_bind_p (stmt))
3218 t = gimple_debug_source_bind_get_var (stmt);
3219 else if (gimple_debug_bind_p (stmt))
3220 t = gimple_debug_bind_get_var (stmt);
3221 else
3222 gcc_unreachable ();
3224 if (TREE_CODE (t) == PARM_DECL
3225 && id->debug_map
3226 && (n = id->debug_map->get (t)))
3228 gcc_assert (VAR_P (*n));
3229 t = *n;
3231 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3232 /* T is a non-localized variable. */;
3233 else
3234 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3236 if (gimple_debug_bind_p (stmt))
3238 gimple_debug_bind_set_var (stmt, t);
3240 if (gimple_debug_bind_has_value_p (stmt))
3241 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3242 remap_gimple_op_r, &wi, NULL);
3244 /* Punt if any decl couldn't be remapped. */
3245 if (processing_debug_stmt < 0)
3246 gimple_debug_bind_reset_value (stmt);
3248 else if (gimple_debug_source_bind_p (stmt))
3250 gimple_debug_source_bind_set_var (stmt, t);
3251 /* When inlining and source bind refers to one of the optimized
3252 away parameters, change the source bind into normal debug bind
3253 referring to the corresponding DEBUG_EXPR_DECL that should have
3254 been bound before the call stmt. */
3255 t = gimple_debug_source_bind_get_value (stmt);
3256 if (t != NULL_TREE
3257 && TREE_CODE (t) == PARM_DECL
3258 && id->call_stmt)
3260 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3261 unsigned int i;
3262 if (debug_args != NULL)
3264 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3265 if ((**debug_args)[i] == DECL_ORIGIN (t)
3266 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3268 t = (**debug_args)[i + 1];
3269 stmt->subcode = GIMPLE_DEBUG_BIND;
3270 gimple_debug_bind_set_value (stmt, t);
3271 break;
3275 if (gimple_debug_source_bind_p (stmt))
3276 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3277 remap_gimple_op_r, &wi, NULL);
3280 processing_debug_stmt = 0;
3282 update_stmt (stmt);
3285 /* Process deferred debug stmts. In order to give values better odds
3286 of being successfully remapped, we delay the processing of debug
3287 stmts until all other stmts that might require remapping are
3288 processed. */
3290 static void
3291 copy_debug_stmts (copy_body_data *id)
3293 if (!id->debug_stmts.exists ())
3294 return;
3296 for (gdebug *stmt : id->debug_stmts)
3297 copy_debug_stmt (stmt, id);
3299 id->debug_stmts.release ();
3302 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3303 another function. */
3305 static tree
3306 copy_tree_body (copy_body_data *id)
3308 tree fndecl = id->src_fn;
3309 tree body = DECL_SAVED_TREE (fndecl);
3311 walk_tree (&body, copy_tree_body_r, id, NULL);
3313 return body;
3316 /* Make a copy of the body of FN so that it can be inserted inline in
3317 another function. */
3319 static tree
3320 copy_body (copy_body_data *id,
3321 basic_block entry_block_map, basic_block exit_block_map,
3322 basic_block new_entry)
3324 tree fndecl = id->src_fn;
3325 tree body;
3327 /* If this body has a CFG, walk CFG and copy. */
3328 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3329 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3330 new_entry);
3331 copy_debug_stmts (id);
3332 delete id->killed_new_ssa_names;
3333 id->killed_new_ssa_names = NULL;
3335 return body;
3338 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3339 defined in function FN, or of a data member thereof. */
3341 static bool
3342 self_inlining_addr_expr (tree value, tree fn)
3344 tree var;
3346 if (TREE_CODE (value) != ADDR_EXPR)
3347 return false;
3349 var = get_base_address (TREE_OPERAND (value, 0));
3351 return var && auto_var_in_fn_p (var, fn);
3354 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3355 lexical block and line number information from base_stmt, if given,
3356 or from the last stmt of the block otherwise. */
3358 static gimple *
3359 insert_init_debug_bind (copy_body_data *id,
3360 basic_block bb, tree var, tree value,
3361 gimple *base_stmt)
3363 gimple *note;
3364 gimple_stmt_iterator gsi;
3365 tree tracked_var;
3367 if (!gimple_in_ssa_p (id->src_cfun))
3368 return NULL;
3370 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3371 return NULL;
3373 tracked_var = target_for_debug_bind (var);
3374 if (!tracked_var)
3375 return NULL;
3377 if (bb)
3379 gsi = gsi_last_bb (bb);
3380 if (!base_stmt && !gsi_end_p (gsi))
3381 base_stmt = gsi_stmt (gsi);
3384 note = gimple_build_debug_bind (tracked_var,
3385 value == error_mark_node
3386 ? NULL_TREE : unshare_expr (value),
3387 base_stmt);
3389 if (bb)
3391 if (!gsi_end_p (gsi))
3392 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3393 else
3394 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3397 return note;
3400 static void
3401 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3403 /* If VAR represents a zero-sized variable, it's possible that the
3404 assignment statement may result in no gimple statements. */
3405 if (init_stmt)
3407 gimple_stmt_iterator si = gsi_last_bb (bb);
3409 /* We can end up with init statements that store to a non-register
3410 from a rhs with a conversion. Handle that here by forcing the
3411 rhs into a temporary. gimple_regimplify_operands is not
3412 prepared to do this for us. */
3413 if (!is_gimple_debug (init_stmt)
3414 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3415 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3416 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3418 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3419 TREE_TYPE (gimple_assign_lhs (init_stmt)),
3420 gimple_assign_rhs1 (init_stmt));
3421 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3422 GSI_NEW_STMT);
3423 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3424 gimple_assign_set_rhs1 (init_stmt, rhs);
3426 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3427 if (!is_gimple_debug (init_stmt))
3429 gimple_regimplify_operands (init_stmt, &si);
3431 tree def = gimple_assign_lhs (init_stmt);
3432 insert_init_debug_bind (id, bb, def, def, init_stmt);
3437 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3438 if need be (which should only be necessary for invalid programs). Attempt
3439 to convert VALUE to TYPE and return the result if that is possible; just return
3440 a zero constant of the given type if it fails. */
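/* Illustration (a sketch, not from the original sources): an int value for
   a float parameter is handled by fold_convert; an aggregate, or a value of
   an unrelated type of the same size, is reinterpreted via VIEW_CONVERT_EXPR;
   only when even that is not possible (a register-type value whose size
   differs from TYPE) does the result degrade to build_zero_cst (TYPE).  */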
3442 tree
3443 force_value_to_type (tree type, tree value)
3445 /* If we can match up types by promotion/demotion, do so. */
3446 if (fold_convertible_p (type, value))
3447 return fold_convert (type, value);
3449 /* ??? For valid programs we should not end up here.
3450 Still if we end up with truly mismatched types here, fall back
3451 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3452 GIMPLE to the following passes. */
3453 if (TREE_CODE (value) == WITH_SIZE_EXPR)
3454 return error_mark_node;
3455 else if (!is_gimple_reg_type (TREE_TYPE (value))
3456 || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3457 return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3458 else
3459 return build_zero_cst (type);
3462 /* Initialize parameter P with VALUE. If needed, produce an init statement
3463 at the end of BB. When BB is NULL, we return the init statement to be
3464 output later. */
3465 static gimple *
3466 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3467 basic_block bb, tree *vars)
3469 gimple *init_stmt = NULL;
3470 tree var;
3471 tree def = (gimple_in_ssa_p (cfun)
3472 ? ssa_default_def (id->src_cfun, p) : NULL);
3474 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3475 here since the type of this decl must be visible to the calling
3476 function. */
3477 var = copy_decl_to_var (p, id);
3479 /* Declare this new variable. */
3480 DECL_CHAIN (var) = *vars;
3481 *vars = var;
3483 /* Make gimplifier happy about this variable. */
3484 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3486 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3487 we would not need to create a new variable here at all, if it
3488 weren't for debug info. Still, we can just use the argument
3489 value. */
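  /* For instance (hypothetical): when the argument is an invariant such as
     42 and P is read-only and never has its address taken, the branch below
     simply maps P to 42 in the decl map and emits only a debug bind for the
     replacement variable - no run-time copy of the argument is created.  */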
3490 if (TREE_READONLY (p)
3491 && !TREE_ADDRESSABLE (p)
3492 && value
3493 && !TREE_SIDE_EFFECTS (value)
3494 && !def)
3496 /* We may produce non-gimple trees by adding NOPs or introduce invalid
3497 sharing when the value is not constant or DECL. And we need to make
3498 sure that it cannot be modified from another path in the callee. */
3499 if (((is_gimple_min_invariant (value)
3500 /* When the parameter is used in a context that forces it to
3501 not be a GIMPLE register avoid substituting something that
3502 is not a decl there. */
3503 && ! DECL_NOT_GIMPLE_REG_P (p))
3504 || (DECL_P (value) && TREE_READONLY (value))
3505 || (auto_var_in_fn_p (value, id->dst_fn)
3506 && !TREE_ADDRESSABLE (value)))
3507 && useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value))
3508 /* We have to be very careful about ADDR_EXPR. Make sure
3509 the base variable isn't a local variable of the inlined
3510 function, e.g., when doing recursive inlining, direct or
3511 mutually-recursive or whatever, which is why we don't
3512 just test whether fn == current_function_decl. */
3513 && ! self_inlining_addr_expr (value, fn))
3515 insert_decl_map (id, p, value);
3516 if (!id->debug_map)
3517 id->debug_map = new hash_map<tree, tree>;
3518 id->debug_map->put (p, var);
3519 return insert_init_debug_bind (id, bb, var, value, NULL);
3523 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3524 that way, when the PARM_DECL is encountered, it will be
3525 automatically replaced by the VAR_DECL. */
3526 insert_decl_map (id, p, var);
3528 /* Even if P was TREE_READONLY, the new VAR should not be. In the original
3529 code, we would have constructed a temporary, and then the function body
3530 would have never changed the value of P. However, now, we will be
3531 constructing VAR directly. Therefore, it must not be TREE_READONLY. */
3532 TREE_READONLY (var) = 0;
3534 tree rhs = value;
3535 if (value
3536 && value != error_mark_node
3537 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3538 rhs = force_value_to_type (TREE_TYPE (p), value);
3540 /* If there is no setup required and we are in SSA, take the easy route
3541 replacing all SSA names representing the function parameter by the
3542 SSA name passed to the function.
3544 We need to construct a map for the variable anyway as it might be used
3545 in different SSA names when the parameter is assigned in the function.
3547 Do the replacement at -O0 for const arguments replaced by a constant.
3548 This is important for builtin_constant_p and other constructs requiring
3549 a constant argument to be visible in the inlined function body. */
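  /* Hypothetical example: inlining foo (x_5) where the parameter P has the
     default definition P_1(D) and the argument is the SSA name x_5 (or an
     invariant) lets us map P_1(D) directly to x_5 below, so no assignment
     statement needs to be emitted for the parameter at all.  */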
3550 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3551 && (optimize
3552 || (TREE_READONLY (p)
3553 && is_gimple_min_invariant (rhs)))
3554 && (TREE_CODE (rhs) == SSA_NAME
3555 || is_gimple_min_invariant (rhs))
3556 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3558 insert_decl_map (id, def, rhs);
3559 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3562 /* If the value of the argument is never used, we need not bother
3563 initializing it. */
3564 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3566 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3567 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3570 /* Initialize this VAR_DECL from the equivalent argument. Convert
3571 the argument to the proper type in case it was promoted. */
3572 if (value)
3574 if (rhs == error_mark_node)
3576 insert_decl_map (id, p, var);
3577 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3580 STRIP_USELESS_TYPE_CONVERSION (rhs);
3582 /* If we are in SSA form properly remap the default definition
3583 or assign to a dummy SSA name if the parameter is unused and
3584 we are not optimizing. */
3585 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3587 if (def)
3589 def = remap_ssa_name (def, id);
3590 init_stmt = gimple_build_assign (def, rhs);
3591 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3592 set_ssa_default_def (cfun, var, NULL);
3594 else if (!optimize)
3596 def = make_ssa_name (var);
3597 init_stmt = gimple_build_assign (def, rhs);
3600 else
3601 init_stmt = gimple_build_assign (var, rhs);
3603 if (bb && init_stmt)
3604 insert_init_stmt (id, bb, init_stmt);
3606 return init_stmt;
3609 /* Generate code to initialize the parameters of the function at the
3610 top of the stack in ID from the GIMPLE_CALL STMT. */
3612 static void
3613 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3614 tree fn, basic_block bb)
3616 tree parms;
3617 size_t i;
3618 tree p;
3619 tree vars = NULL_TREE;
3620 tree static_chain = gimple_call_chain (stmt);
3622 /* Figure out what the parameters are. */
3623 parms = DECL_ARGUMENTS (fn);
3625 /* Loop through the parameter declarations, replacing each with an
3626 equivalent VAR_DECL, appropriately initialized. */
3627 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3629 tree val;
3630 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3631 setup_one_parameter (id, p, val, fn, bb, &vars);
3633 /* After remapping parameters remap their types. This has to be done
3634 in a second loop over all parameters to appropriately remap
3635 variable sized arrays when the size is specified in a
3636 parameter following the array. */
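/* E.g. (a sketch): an array parameter declared with a variable length, such
   as int a[n], has a type that refers to the parameter N; its TREE_TYPE can
   only be remapped once every parameter, including N, already has its
   replacement in the decl map, hence the separate second loop.  */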
3637 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3639 tree *varp = id->decl_map->get (p);
3640 if (varp && VAR_P (*varp))
3642 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3643 ? ssa_default_def (id->src_cfun, p) : NULL);
3644 tree var = *varp;
3645 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3646 /* Also remap the default definition if it was remapped
3647 to the default definition of the parameter replacement
3648 by the parameter setup. */
3649 if (def)
3651 tree *defp = id->decl_map->get (def);
3652 if (defp
3653 && TREE_CODE (*defp) == SSA_NAME
3654 && SSA_NAME_VAR (*defp) == var)
3655 TREE_TYPE (*defp) = TREE_TYPE (var);
3660 /* Initialize the static chain. */
3661 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3662 gcc_assert (fn != current_function_decl);
3663 if (p)
3665 /* No static chain? Seems like a bug in tree-nested.c. */
3666 gcc_assert (static_chain);
3668 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3671 declare_inline_vars (id->block, vars);
3675 /* Declare a return variable to replace the RESULT_DECL for the
3676 function we are calling. An appropriate DECL_STMT is returned.
3677 The USE_STMT is filled to contain a use of the declaration to
3678 indicate the return value of the function.
3680 RETURN_SLOT, if non-null, is the place where to store the result. It
3681 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3682 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3684 The return value is a (possibly null) value that holds the result
3685 as seen by the caller. */
3687 static tree
3688 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3689 basic_block entry_bb)
3691 tree callee = id->src_fn;
3692 tree result = DECL_RESULT (callee);
3693 tree callee_type = TREE_TYPE (result);
3694 tree caller_type;
3695 tree var, use;
3697 /* Handle type-mismatches in the function declaration return type
3698 vs. the call expression. */
3699 if (modify_dest)
3700 caller_type = TREE_TYPE (modify_dest);
3701 else if (return_slot)
3702 caller_type = TREE_TYPE (return_slot);
3703 else /* No LHS on the call. */
3704 caller_type = TREE_TYPE (TREE_TYPE (callee));
3706 /* We don't need to do anything for functions that don't return anything. */
3707 if (VOID_TYPE_P (callee_type))
3708 return NULL_TREE;
3710 /* If there was a return slot, then the return value is the
3711 dereferenced address of that object. */
3712 if (return_slot)
3714 /* The front end shouldn't have used both return_slot and
3715 a modify expression. */
3716 gcc_assert (!modify_dest);
3717 if (DECL_BY_REFERENCE (result))
3719 tree return_slot_addr = build_fold_addr_expr (return_slot);
3720 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3722 /* We are going to construct *&return_slot and we can't do that
3723 for variables believed to be not addressable.
3725 FIXME: This check can possibly trigger, because values returned
3726 via the return slot optimization are not believed to have their
3727 address taken by alias analysis. */
3728 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3729 var = return_slot_addr;
3730 mark_addressable (return_slot);
3732 else
3734 var = return_slot;
3735 gcc_assert (TREE_CODE (var) != SSA_NAME);
3736 if (TREE_ADDRESSABLE (result))
3737 mark_addressable (var);
3739 if (DECL_NOT_GIMPLE_REG_P (result)
3740 && DECL_P (var))
3741 DECL_NOT_GIMPLE_REG_P (var) = 1;
3743 if (!useless_type_conversion_p (callee_type, caller_type))
3744 var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3746 use = NULL;
3747 goto done;
3750 /* All types requiring non-trivial constructors should have been handled. */
3751 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3753 /* Attempt to avoid creating a new temporary variable. */
3754 if (modify_dest
3755 && TREE_CODE (modify_dest) != SSA_NAME)
3757 bool use_it = false;
3759 /* We can't use MODIFY_DEST if there's type promotion involved. */
3760 if (!useless_type_conversion_p (callee_type, caller_type))
3761 use_it = false;
3763 /* ??? If we're assigning to a variable sized type, then we must
3764 reuse the destination variable, because we've no good way to
3765 create variable sized temporaries at this point. */
3766 else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3767 use_it = true;
3769 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3770 reuse it as the result of the call directly. Don't do this if
3771 it would promote MODIFY_DEST to addressable. */
3772 else if (TREE_ADDRESSABLE (result))
3773 use_it = false;
3774 else
3776 tree base_m = get_base_address (modify_dest);
3778 /* If the base isn't a decl, then it's a pointer, and we don't
3779 know where that's going to go. */
3780 if (!DECL_P (base_m))
3781 use_it = false;
3782 else if (is_global_var (base_m))
3783 use_it = false;
3784 else if (DECL_NOT_GIMPLE_REG_P (result)
3785 && !DECL_NOT_GIMPLE_REG_P (base_m))
3786 use_it = false;
3787 else if (!TREE_ADDRESSABLE (base_m))
3788 use_it = true;
3791 if (use_it)
3793 var = modify_dest;
3794 use = NULL;
3795 goto done;
3799 gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3801 var = copy_result_decl_to_var (result, id);
3802 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3804 /* Do not have the rest of GCC warn about this variable as it should
3805 not be visible to the user. */
3806 suppress_warning (var /* OPT_Wuninitialized? */);
3808 declare_inline_vars (id->block, var);
3810 /* Build the use expr. If the return type of the function was
3811 promoted, convert it back to the expected type. */
3812 use = var;
3813 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3815 /* If we can match up types by promotion/demotion do so. */
3816 if (fold_convertible_p (caller_type, var))
3817 use = fold_convert (caller_type, var);
3818 else
3820 /* ??? For valid programs we should not end up here.
3821 Still, if we end up with truly mismatched types here, fall back
3822 to using a MEM_REF to avoid leaking invalid GIMPLE to the following
3823 passes. */
3824 /* Prevent var from being written into SSA form. */
3825 if (is_gimple_reg_type (TREE_TYPE (var)))
3826 DECL_NOT_GIMPLE_REG_P (var) = true;
3827 use = fold_build2 (MEM_REF, caller_type,
3828 build_fold_addr_expr (var),
3829 build_int_cst (ptr_type_node, 0));
3833 STRIP_USELESS_TYPE_CONVERSION (use);
3835 if (DECL_BY_REFERENCE (result))
3837 TREE_ADDRESSABLE (var) = 1;
3838 var = build_fold_addr_expr (var);
3841 done:
3842 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3843 way, when the RESULT_DECL is encountered, it will be
3844 automatically replaced by the VAR_DECL.
3846 When returning by reference, ensure that RESULT_DECL remaps to
3847 gimple_val. */
3848 if (DECL_BY_REFERENCE (result)
3849 && !is_gimple_val (var))
3851 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3852 insert_decl_map (id, result, temp);
3853 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3854 its default_def SSA_NAME. */
3855 if (gimple_in_ssa_p (id->src_cfun)
3856 && is_gimple_reg (result))
3858 temp = make_ssa_name (temp);
3859 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3861 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3863 else
3864 insert_decl_map (id, result, var);
3866 /* Remember this so we can ignore it in remap_decls. */
3867 id->retvar = var;
3868 return use;
3871 /* Determine if the function can be copied. If so, return NULL. If
3872 not, return a string describing the reason for failure. */
3874 const char *
3875 copy_forbidden (struct function *fun)
3877 const char *reason = fun->cannot_be_copied_reason;
3879 /* Only examine the function once. */
3880 if (fun->cannot_be_copied_set)
3881 return reason;
3883 /* We cannot copy a function that receives a non-local goto
3884 because we cannot remap the destination label used in the
3885 function that is performing the non-local goto. */
3886 /* ??? Actually, this should be possible, if we work at it.
3887 No doubt there's just a handful of places that simply
3888 assume it doesn't happen and don't substitute properly. */
3889 if (fun->has_nonlocal_label)
3891 reason = G_("function %q+F can never be copied "
3892 "because it receives a non-local goto");
3893 goto fail;
3896 if (fun->has_forced_label_in_static)
3898 reason = G_("function %q+F can never be copied because it saves "
3899 "address of local label in a static variable");
3900 goto fail;
3903 fail:
3904 fun->cannot_be_copied_reason = reason;
3905 fun->cannot_be_copied_set = true;
3906 return reason;
3910 static const char *inline_forbidden_reason;
3912 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3913 iff a function cannot be inlined. Also sets the reason why. */
3915 static tree
3916 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3917 struct walk_stmt_info *wip)
3919 tree fn = (tree) wip->info;
3920 tree t;
3921 gimple *stmt = gsi_stmt (*gsi);
3923 switch (gimple_code (stmt))
3925 case GIMPLE_CALL:
3926 /* Refuse to inline an alloca call unless the user explicitly forced it,
3927 as this may change the program's memory overhead drastically when the
3928 function using alloca is called in a loop. In the GCC present in
3929 SPEC2000, inlining into schedule_block caused it to require 2GB of
3930 RAM instead of 256MB. Don't do so for alloca calls emitted for
3931 VLA objects, as those can't cause unbounded growth (they're always
3932 wrapped inside stack_save/stack_restore regions). */
3933 if (gimple_maybe_alloca_call_p (stmt)
3934 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3935 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3937 inline_forbidden_reason
3938 = G_("function %q+F can never be inlined because it uses "
3939 "alloca (override using the always_inline attribute)");
3940 *handled_ops_p = true;
3941 return fn;
3944 t = gimple_call_fndecl (stmt);
3945 if (t == NULL_TREE)
3946 break;
3948 /* We cannot inline functions that call setjmp. */
3949 if (setjmp_call_p (t))
3951 inline_forbidden_reason
3952 = G_("function %q+F can never be inlined because it uses setjmp");
3953 *handled_ops_p = true;
3954 return t;
3957 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3958 switch (DECL_FUNCTION_CODE (t))
3960 /* We cannot inline functions that take a variable number of
3961 arguments. */
3962 case BUILT_IN_VA_START:
3963 case BUILT_IN_NEXT_ARG:
3964 case BUILT_IN_VA_END:
3965 inline_forbidden_reason
3966 = G_("function %q+F can never be inlined because it "
3967 "uses variable argument lists");
3968 *handled_ops_p = true;
3969 return t;
3971 case BUILT_IN_LONGJMP:
3972 /* We can't inline functions that call __builtin_longjmp at
3973 all. The non-local goto machinery really requires the
3974 destination be in a different function. If we allow the
3975 function calling __builtin_longjmp to be inlined into the
3976 function calling __builtin_setjmp, Things will Go Awry. */
3977 inline_forbidden_reason
3978 = G_("function %q+F can never be inlined because "
3979 "it uses setjmp-longjmp exception handling");
3980 *handled_ops_p = true;
3981 return t;
3983 case BUILT_IN_NONLOCAL_GOTO:
3984 /* Similarly. */
3985 inline_forbidden_reason
3986 = G_("function %q+F can never be inlined because "
3987 "it uses non-local goto");
3988 *handled_ops_p = true;
3989 return t;
3991 case BUILT_IN_RETURN:
3992 case BUILT_IN_APPLY_ARGS:
3993 /* If a __builtin_apply_args caller would be inlined,
3994 it would be saving arguments of the function it has
3995 been inlined into. Similarly __builtin_return would
3996 return from the function the inline has been inlined into. */
3997 inline_forbidden_reason
3998 = G_("function %q+F can never be inlined because "
3999 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
4000 *handled_ops_p = true;
4001 return t;
4003 default:
4004 break;
4006 break;
4008 case GIMPLE_GOTO:
4009 t = gimple_goto_dest (stmt);
4011 /* We will not inline a function which uses computed goto. The
4012 addresses of its local labels, which may be tucked into
4013 global storage, are of course not constant across
4014 instantiations, which causes unexpected behavior. */
4015 if (TREE_CODE (t) != LABEL_DECL)
4017 inline_forbidden_reason
4018 = G_("function %q+F can never be inlined "
4019 "because it contains a computed goto");
4020 *handled_ops_p = true;
4021 return t;
4023 break;
4025 default:
4026 break;
4029 *handled_ops_p = false;
4030 return NULL_TREE;
4033 /* Return true if FNDECL is a function that cannot be inlined into
4034 another one. */
4036 static bool
4037 inline_forbidden_p (tree fndecl)
4039 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
4040 struct walk_stmt_info wi;
4041 basic_block bb;
4042 bool forbidden_p = false;
4044 /* First check for shared reasons not to copy the code. */
4045 inline_forbidden_reason = copy_forbidden (fun);
4046 if (inline_forbidden_reason != NULL)
4047 return true;
4049 /* Next, walk the statements of the function looking for
4050 constructs we can't handle, or that are non-optimal for inlining. */
4051 hash_set<tree> visited_nodes;
4052 memset (&wi, 0, sizeof (wi));
4053 wi.info = (void *) fndecl;
4054 wi.pset = &visited_nodes;
4056 /* We cannot inline a function with a variable-sized parameter because we
4057 cannot materialize a temporary of such a type in the caller if need be.
4058 Note that the return case is not symmetrical because we can guarantee
4059 that a temporary is not needed by means of CALL_EXPR_RETURN_SLOT_OPT. */
4060 for (tree parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
4061 if (!poly_int_tree_p (DECL_SIZE (parm)))
4063 inline_forbidden_reason
4064 = G_("function %q+F can never be inlined because "
4065 "it has a VLA argument");
4066 return true;
4069 FOR_EACH_BB_FN (bb, fun)
4071 gimple *ret;
4072 gimple_seq seq = bb_seq (bb);
4073 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
4074 forbidden_p = (ret != NULL);
4075 if (forbidden_p)
4076 break;
4079 return forbidden_p;
4082 /* Return false if the function FNDECL cannot be inlined on account of its
4083 attributes, true otherwise. */
4084 static bool
4085 function_attribute_inlinable_p (const_tree fndecl)
4087 if (targetm.attribute_table)
4089 const_tree a;
4091 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
4093 const_tree name = get_attribute_name (a);
4094 int i;
4096 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
4097 if (is_attribute_p (targetm.attribute_table[i].name, name))
4098 return targetm.function_attribute_inlinable_p (fndecl);
4102 return true;
4105 /* Returns nonzero if FN is a function that does not have any
4106 fundamental inline blocking properties. */
4108 bool
4109 tree_inlinable_function_p (tree fn)
4111 bool inlinable = true;
4112 bool do_warning;
4113 tree always_inline;
4115 /* If we've already decided this function shouldn't be inlined,
4116 there's no need to check again. */
4117 if (DECL_UNINLINABLE (fn))
4118 return false;
4120 /* We only warn for functions declared `inline' by the user. */
4121 do_warning = (opt_for_fn (fn, warn_inline)
4122 && DECL_DECLARED_INLINE_P (fn)
4123 && !DECL_NO_INLINE_WARNING_P (fn)
4124 && !DECL_IN_SYSTEM_HEADER (fn));
4126 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4128 if (flag_no_inline
4129 && always_inline == NULL)
4131 if (do_warning)
4132 warning (OPT_Winline, "function %q+F can never be inlined because it "
4133 "is suppressed using %<-fno-inline%>", fn);
4134 inlinable = false;
4137 else if (!function_attribute_inlinable_p (fn))
4139 if (do_warning)
4140 warning (OPT_Winline, "function %q+F can never be inlined because it "
4141 "uses attributes conflicting with inlining", fn);
4142 inlinable = false;
4145 else if (inline_forbidden_p (fn))
4147 /* See if we should warn about uninlinable functions. Previously,
4148 some of these warnings would be issued while trying to expand
4149 the function inline, but that would cause multiple warnings
4150 about functions that would for example call alloca. But since
4151 this is a property of the function, just one warning is enough.
4152 As a bonus we can now give more details about the reason why a
4153 function is not inlinable. */
4154 if (always_inline)
4155 error (inline_forbidden_reason, fn);
4156 else if (do_warning)
4157 warning (OPT_Winline, inline_forbidden_reason, fn);
4159 inlinable = false;
4162 /* Squirrel away the result so that we don't have to check again. */
4163 DECL_UNINLINABLE (fn) = !inlinable;
4165 return inlinable;
4168 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
4169 word size, take a possible memcpy call into account, and return the
4170 cost based on whether we are optimizing for size or speed according to SPEED_P. */
4173 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4175 HOST_WIDE_INT size;
4177 gcc_assert (!VOID_TYPE_P (type));
4179 if (TREE_CODE (type) == VECTOR_TYPE)
4181 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4182 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4183 int orig_mode_size
4184 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4185 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4186 return ((orig_mode_size + simd_mode_size - 1)
4187 / simd_mode_size);
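/* The vector cost above is the number of preferred-SIMD-width moves needed
   to copy the whole vector: e.g. a 32-byte vector type with a 16-byte
   preferred SIMD mode counts as two moves (the division rounds up).  */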
4190 size = int_size_in_bytes (type);
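/* Aggregates up to MOVE_MAX_PIECES * MOVE_RATIO bytes are costed as
   piecewise moves of at most MOVE_MAX_PIECES bytes each; anything larger
   (or of variable size) is assumed to become a memcpy call.  E.g. on a
   target where MOVE_MAX_PIECES is 8, a 32-byte structure counts as four
   moves.  */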
4192 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4193 /* Cost of a memcpy call, 3 arguments and the call. */
4194 return 4;
4195 else
4196 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
4199 /* Returns cost of operation CODE, according to WEIGHTS */
4201 static int
4202 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4203 tree op1 ATTRIBUTE_UNUSED, tree op2)
4205 switch (code)
4207 /* These are "free" conversions, or their presumed cost
4208 is folded into other operations. */
4209 case RANGE_EXPR:
4210 CASE_CONVERT:
4211 case COMPLEX_EXPR:
4212 case PAREN_EXPR:
4213 case VIEW_CONVERT_EXPR:
4214 return 0;
4216 /* Assign cost of 1 to usual operations.
4217 ??? We may consider mapping RTL costs to this. */
4218 case COND_EXPR:
4219 case VEC_COND_EXPR:
4220 case VEC_PERM_EXPR:
4222 case PLUS_EXPR:
4223 case POINTER_PLUS_EXPR:
4224 case POINTER_DIFF_EXPR:
4225 case MINUS_EXPR:
4226 case MULT_EXPR:
4227 case MULT_HIGHPART_EXPR:
4229 case ADDR_SPACE_CONVERT_EXPR:
4230 case FIXED_CONVERT_EXPR:
4231 case FIX_TRUNC_EXPR:
4233 case NEGATE_EXPR:
4234 case FLOAT_EXPR:
4235 case MIN_EXPR:
4236 case MAX_EXPR:
4237 case ABS_EXPR:
4238 case ABSU_EXPR:
4240 case LSHIFT_EXPR:
4241 case RSHIFT_EXPR:
4242 case LROTATE_EXPR:
4243 case RROTATE_EXPR:
4245 case BIT_IOR_EXPR:
4246 case BIT_XOR_EXPR:
4247 case BIT_AND_EXPR:
4248 case BIT_NOT_EXPR:
4250 case TRUTH_ANDIF_EXPR:
4251 case TRUTH_ORIF_EXPR:
4252 case TRUTH_AND_EXPR:
4253 case TRUTH_OR_EXPR:
4254 case TRUTH_XOR_EXPR:
4255 case TRUTH_NOT_EXPR:
4257 case LT_EXPR:
4258 case LE_EXPR:
4259 case GT_EXPR:
4260 case GE_EXPR:
4261 case EQ_EXPR:
4262 case NE_EXPR:
4263 case ORDERED_EXPR:
4264 case UNORDERED_EXPR:
4266 case UNLT_EXPR:
4267 case UNLE_EXPR:
4268 case UNGT_EXPR:
4269 case UNGE_EXPR:
4270 case UNEQ_EXPR:
4271 case LTGT_EXPR:
4273 case CONJ_EXPR:
4275 case PREDECREMENT_EXPR:
4276 case PREINCREMENT_EXPR:
4277 case POSTDECREMENT_EXPR:
4278 case POSTINCREMENT_EXPR:
4280 case REALIGN_LOAD_EXPR:
4282 case WIDEN_PLUS_EXPR:
4283 case WIDEN_MINUS_EXPR:
4284 case WIDEN_SUM_EXPR:
4285 case WIDEN_MULT_EXPR:
4286 case DOT_PROD_EXPR:
4287 case SAD_EXPR:
4288 case WIDEN_MULT_PLUS_EXPR:
4289 case WIDEN_MULT_MINUS_EXPR:
4290 case WIDEN_LSHIFT_EXPR:
4292 case VEC_WIDEN_PLUS_HI_EXPR:
4293 case VEC_WIDEN_PLUS_LO_EXPR:
4294 case VEC_WIDEN_MINUS_HI_EXPR:
4295 case VEC_WIDEN_MINUS_LO_EXPR:
4296 case VEC_WIDEN_MULT_HI_EXPR:
4297 case VEC_WIDEN_MULT_LO_EXPR:
4298 case VEC_WIDEN_MULT_EVEN_EXPR:
4299 case VEC_WIDEN_MULT_ODD_EXPR:
4300 case VEC_UNPACK_HI_EXPR:
4301 case VEC_UNPACK_LO_EXPR:
4302 case VEC_UNPACK_FLOAT_HI_EXPR:
4303 case VEC_UNPACK_FLOAT_LO_EXPR:
4304 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4305 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4306 case VEC_PACK_TRUNC_EXPR:
4307 case VEC_PACK_SAT_EXPR:
4308 case VEC_PACK_FIX_TRUNC_EXPR:
4309 case VEC_PACK_FLOAT_EXPR:
4310 case VEC_WIDEN_LSHIFT_HI_EXPR:
4311 case VEC_WIDEN_LSHIFT_LO_EXPR:
4312 case VEC_DUPLICATE_EXPR:
4313 case VEC_SERIES_EXPR:
4315 return 1;
4317 /* A few special cases of expensive operations. This is useful
4318 to avoid inlining functions that have too many of these. */
4319 case TRUNC_DIV_EXPR:
4320 case CEIL_DIV_EXPR:
4321 case FLOOR_DIV_EXPR:
4322 case ROUND_DIV_EXPR:
4323 case EXACT_DIV_EXPR:
4324 case TRUNC_MOD_EXPR:
4325 case CEIL_MOD_EXPR:
4326 case FLOOR_MOD_EXPR:
4327 case ROUND_MOD_EXPR:
4328 case RDIV_EXPR:
4329 if (TREE_CODE (op2) != INTEGER_CST)
4330 return weights->div_mod_cost;
4331 return 1;
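/* Integer division or modulo by a compile-time constant is usually
   strength-reduced to multiplications and shifts, so only a divisor that
   is not an integer constant is charged the full div_mod_cost above.  */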
4333 /* Bit-field insertion needs several shift and mask operations. */
4334 case BIT_INSERT_EXPR:
4335 return 3;
4337 default:
4338 /* We expect a copy assignment with no operator. */
4339 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4340 return 0;
4345 /* Estimate number of instructions that will be created by expanding
4346 the statements in the statement sequence STMTS.
4347 WEIGHTS contains weights attributed to various constructs. */
4350 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4352 int cost;
4353 gimple_stmt_iterator gsi;
4355 cost = 0;
4356 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4357 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4359 return cost;
4363 /* Estimate number of instructions that will be created by expanding STMT.
4364 WEIGHTS contains weights attributed to various constructs. */
4367 estimate_num_insns (gimple *stmt, eni_weights *weights)
4369 unsigned cost, i;
4370 enum gimple_code code = gimple_code (stmt);
4371 tree lhs;
4372 tree rhs;
4374 switch (code)
4376 case GIMPLE_ASSIGN:
4377 /* Try to estimate the cost of assignments. We have two cases to
4378 deal with:
4379 1) Simple assignments to registers;
4380 2) Stores to things that must live in memory. This includes
4381 "normal" stores to scalars, but also assignments of large
4382 structures, or constructors of big arrays;
4384 Let us look at these two cases, assuming we have "a = b + C":
4385 <GIMPLE_ASSIGN <var_decl "a">
4386 <plus_expr <var_decl "b"> <constant C>>
4387 If "a" is a GIMPLE register, the assignment to it is free on almost
4388 any target, because "a" usually ends up in a real register. Hence
4389 the only cost of this expression comes from the PLUS_EXPR, and we
4390 can ignore the GIMPLE_ASSIGN.
4391 If "a" is not a GIMPLE register, the assignment to "a" will most
4392 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4393 of moving something into "a", which we compute using the function
4394 estimate_move_cost. */
4395 if (gimple_clobber_p (stmt))
4396 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4398 lhs = gimple_assign_lhs (stmt);
4399 rhs = gimple_assign_rhs1 (stmt);
4401 cost = 0;
4403 /* Account for the cost of moving to / from memory. */
4404 if (gimple_store_p (stmt))
4405 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4406 if (gimple_assign_load_p (stmt))
4407 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4409 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4410 gimple_assign_rhs1 (stmt),
4411 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4412 == GIMPLE_BINARY_RHS
4413 ? gimple_assign_rhs2 (stmt) : NULL);
4414 break;
4416 case GIMPLE_COND:
4417 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4418 gimple_op (stmt, 0),
4419 gimple_op (stmt, 1));
4420 break;
4422 case GIMPLE_SWITCH:
4424 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4425 /* Take into account cost of the switch + guess 2 conditional jumps for
4426 each case label.
4428 TODO: once the switch expansion logic is sufficiently separated, we can
4429 do a better job of estimating the cost of the switch. */
4430 if (weights->time_based)
4431 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4432 else
4433 cost = gimple_switch_num_labels (switch_stmt) * 2;
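/* For example, a switch with eight labels is estimated at 6 insns when
   optimizing for speed (roughly the depth of a balanced decision tree,
   times two) and at 16 insns when optimizing for size.  */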
4435 break;
4437 case GIMPLE_CALL:
4439 tree decl;
4441 if (gimple_call_internal_p (stmt))
4442 return 0;
4443 else if ((decl = gimple_call_fndecl (stmt))
4444 && fndecl_built_in_p (decl))
4446 /* Do not special-case builtins where we see the body.
4447 This just confuses the inliner. */
4448 struct cgraph_node *node;
4449 if ((node = cgraph_node::get (decl))
4450 && node->definition)
4452 /* For builtins that are likely expanded to nothing or
4453 inlined, do not account for operand costs. */
4454 else if (is_simple_builtin (decl))
4455 return 0;
4456 else if (is_inexpensive_builtin (decl))
4457 return weights->target_builtin_call_cost;
4458 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4460 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4461 specialize the cheap expansion we do here.
4462 ??? This asks for a more general solution. */
4463 switch (DECL_FUNCTION_CODE (decl))
4465 case BUILT_IN_POW:
4466 case BUILT_IN_POWF:
4467 case BUILT_IN_POWL:
4468 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4469 && (real_equal
4470 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4471 &dconst2)))
4472 return estimate_operator_cost
4473 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4474 gimple_call_arg (stmt, 0));
4475 break;
4477 default:
4478 break;
4483 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4484 if (gimple_call_lhs (stmt))
4485 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4486 weights->time_based);
4487 for (i = 0; i < gimple_call_num_args (stmt); i++)
4489 tree arg = gimple_call_arg (stmt, i);
4490 cost += estimate_move_cost (TREE_TYPE (arg),
4491 weights->time_based);
4493 break;
4496 case GIMPLE_RETURN:
4497 return weights->return_cost;
4499 case GIMPLE_GOTO:
4500 case GIMPLE_LABEL:
4501 case GIMPLE_NOP:
4502 case GIMPLE_PHI:
4503 case GIMPLE_PREDICT:
4504 case GIMPLE_DEBUG:
4505 return 0;
4507 case GIMPLE_ASM:
4509 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4510 /* 1000 means infinity. This avoids overflows later
4511 with very long asm statements. */
4512 if (count > 1000)
4513 count = 1000;
4514 /* If this asm is asm inline, count anything as minimum size. */
4515 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4516 count = MIN (1, count);
4517 return MAX (1, count);
4520 case GIMPLE_RESX:
4521 /* This is either going to be an external function call with one
4522 argument, or two register copy statements plus a goto. */
4523 return 2;
4525 case GIMPLE_EH_DISPATCH:
4526 /* ??? This is going to turn into a switch statement. Ideally
4527 we'd have a look at the eh region and estimate the number of
4528 edges involved. */
4529 return 10;
4531 case GIMPLE_BIND:
4532 return estimate_num_insns_seq (
4533 gimple_bind_body (as_a <gbind *> (stmt)),
4534 weights);
4536 case GIMPLE_EH_FILTER:
4537 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4539 case GIMPLE_CATCH:
4540 return estimate_num_insns_seq (gimple_catch_handler (
4541 as_a <gcatch *> (stmt)),
4542 weights);
4544 case GIMPLE_TRY:
4545 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4546 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4548 /* OMP directives are generally very expensive. */
4550 case GIMPLE_OMP_RETURN:
4551 case GIMPLE_OMP_SECTIONS_SWITCH:
4552 case GIMPLE_OMP_ATOMIC_STORE:
4553 case GIMPLE_OMP_CONTINUE:
4554 /* ...except these, which are cheap. */
4555 return 0;
4557 case GIMPLE_OMP_ATOMIC_LOAD:
4558 return weights->omp_cost;
4560 case GIMPLE_OMP_FOR:
4561 return (weights->omp_cost
4562 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4563 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4565 case GIMPLE_OMP_PARALLEL:
4566 case GIMPLE_OMP_TASK:
4567 case GIMPLE_OMP_CRITICAL:
4568 case GIMPLE_OMP_MASTER:
4569 case GIMPLE_OMP_MASKED:
4570 case GIMPLE_OMP_SCOPE:
4571 case GIMPLE_OMP_TASKGROUP:
4572 case GIMPLE_OMP_ORDERED:
4573 case GIMPLE_OMP_SCAN:
4574 case GIMPLE_OMP_SECTION:
4575 case GIMPLE_OMP_SECTIONS:
4576 case GIMPLE_OMP_SINGLE:
4577 case GIMPLE_OMP_TARGET:
4578 case GIMPLE_OMP_TEAMS:
4579 return (weights->omp_cost
4580 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4582 case GIMPLE_TRANSACTION:
4583 return (weights->tm_cost
4584 + estimate_num_insns_seq (gimple_transaction_body (
4585 as_a <gtransaction *> (stmt)),
4586 weights));
4588 default:
4589 gcc_unreachable ();
4592 return cost;
4595 /* Estimate number of instructions that will be created by expanding
4596 function FNDECL. WEIGHTS contains weights attributed to various
4597 constructs. */
4600 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4602 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4603 gimple_stmt_iterator bsi;
4604 basic_block bb;
4605 int n = 0;
4607 gcc_assert (my_function && my_function->cfg);
4608 FOR_EACH_BB_FN (bb, my_function)
4610 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4611 n += estimate_num_insns (gsi_stmt (bsi), weights);
4614 return n;
4618 /* Initializes weights used by estimate_num_insns. */
4620 void
4621 init_inline_once (void)
4623 eni_size_weights.call_cost = 1;
4624 eni_size_weights.indirect_call_cost = 3;
4625 eni_size_weights.target_builtin_call_cost = 1;
4626 eni_size_weights.div_mod_cost = 1;
4627 eni_size_weights.omp_cost = 40;
4628 eni_size_weights.tm_cost = 10;
4629 eni_size_weights.time_based = false;
4630 eni_size_weights.return_cost = 1;
4632 /* Estimating the time for a call is difficult, since we have no idea what the
4633 called function does. In the current uses of eni_time_weights,
4634 underestimating the cost does less harm than overestimating it, so
4635 we choose a rather small value here. */
4636 eni_time_weights.call_cost = 10;
4637 eni_time_weights.indirect_call_cost = 15;
4638 eni_time_weights.target_builtin_call_cost = 1;
4639 eni_time_weights.div_mod_cost = 10;
4640 eni_time_weights.omp_cost = 40;
4641 eni_time_weights.tm_cost = 40;
4642 eni_time_weights.time_based = true;
4643 eni_time_weights.return_cost = 2;
4647 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4649 static void
4650 prepend_lexical_block (tree current_block, tree new_block)
4652 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4653 BLOCK_SUBBLOCKS (current_block) = new_block;
4654 BLOCK_SUPERCONTEXT (new_block) = current_block;
4657 /* Add local variables from CALLEE to CALLER. */
4659 static inline void
4660 add_local_variables (struct function *callee, struct function *caller,
4661 copy_body_data *id)
4663 tree var;
4664 unsigned ix;
4666 FOR_EACH_LOCAL_DECL (callee, ix, var)
4667 if (!can_be_nonlocal (var, id))
4669 tree new_var = remap_decl (var, id);
4671 /* Remap debug-expressions. */
4672 if (VAR_P (new_var)
4673 && DECL_HAS_DEBUG_EXPR_P (var)
4674 && new_var != var)
4676 tree tem = DECL_DEBUG_EXPR (var);
4677 bool old_regimplify = id->regimplify;
4678 id->remapping_type_depth++;
4679 walk_tree (&tem, copy_tree_body_r, id, NULL);
4680 id->remapping_type_depth--;
4681 id->regimplify = old_regimplify;
4682 SET_DECL_DEBUG_EXPR (new_var, tem);
4683 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4685 add_local_decl (caller, new_var);
4689 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4690 have brought in or introduced any debug stmts for SRCVAR. */
4692 static inline void
4693 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4695 tree *remappedvarp = id->decl_map->get (srcvar);
4697 if (!remappedvarp)
4698 return;
4700 if (!VAR_P (*remappedvarp))
4701 return;
4703 if (*remappedvarp == id->retvar)
4704 return;
4706 tree tvar = target_for_debug_bind (*remappedvarp);
4707 if (!tvar)
4708 return;
4710 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4711 id->call_stmt);
4712 gimple_seq_add_stmt (bindings, stmt);
4715 /* For each inlined variable for which we may have debug bind stmts,
4716 add before GSI a final debug stmt resetting it, marking the end of
4717 its life, so that var-tracking knows it doesn't have to compute
4718 further locations for it. */
4720 static inline void
4721 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4723 tree var;
4724 unsigned ix;
4725 gimple_seq bindings = NULL;
4727 if (!gimple_in_ssa_p (id->src_cfun))
4728 return;
4730 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4731 return;
4733 for (var = DECL_ARGUMENTS (id->src_fn);
4734 var; var = DECL_CHAIN (var))
4735 reset_debug_binding (id, var, &bindings);
4737 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4738 reset_debug_binding (id, var, &bindings);
4740 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4743 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4745 static bool
4746 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4747 bitmap to_purge)
4749 tree use_retvar;
4750 tree fn;
4751 hash_map<tree, tree> *dst;
4752 hash_map<tree, tree> *st = NULL;
4753 tree return_slot;
4754 tree modify_dest;
4755 struct cgraph_edge *cg_edge;
4756 cgraph_inline_failed_t reason;
4757 basic_block return_block;
4758 edge e;
4759 gimple_stmt_iterator gsi, stmt_gsi;
4760 bool successfully_inlined = false;
4761 bool purge_dead_abnormal_edges;
4762 gcall *call_stmt;
4763 unsigned int prop_mask, src_properties;
4764 struct function *dst_cfun;
4765 tree simduid;
4766 use_operand_p use;
4767 gimple *simtenter_stmt = NULL;
4768 vec<tree> *simtvars_save;
4770 /* The gimplifier uses input_location in too many places, such as
4771 internal_get_tmp_var (). */
4772 location_t saved_location = input_location;
4773 input_location = gimple_location (stmt);
4775 /* From here on, we're only interested in CALL_EXPRs. */
4776 call_stmt = dyn_cast <gcall *> (stmt);
4777 if (!call_stmt)
4778 goto egress;
4780 cg_edge = id->dst_node->get_edge (stmt);
4781 gcc_checking_assert (cg_edge);
4782 /* First, see if we can figure out what function is being called.
4783 If we cannot, then there is no hope of inlining the function. */
4784 if (cg_edge->indirect_unknown_callee)
4785 goto egress;
4786 fn = cg_edge->callee->decl;
4787 gcc_checking_assert (fn);
4789 /* If FN is a declaration of a function in a nested scope that was
4790 globally declared inline, we don't set its DECL_INITIAL.
4791 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4792 C++ front-end uses it for cdtors to refer to their internal
4793 declarations, which are not real functions. Fortunately those
4794 don't have trees to be saved, so we can tell by checking their
4795 gimple_body. */
4796 if (!DECL_INITIAL (fn)
4797 && DECL_ABSTRACT_ORIGIN (fn)
4798 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4799 fn = DECL_ABSTRACT_ORIGIN (fn);
4801 /* Don't try to inline functions that are not well-suited to inlining. */
4802 if (cg_edge->inline_failed)
4804 reason = cg_edge->inline_failed;
4805 /* If this call was originally indirect, we do not want to emit any
4806 inlining related warnings or sorry messages because there are no
4807 guarantees regarding those. */
4808 if (cg_edge->indirect_inlining_edge)
4809 goto egress;
4811 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4812 /* For extern inline functions that get redefined we have always
4813 silently ignored the always_inline flag. Better behavior would
4814 be to be able to keep both bodies and use the extern inline body
4815 for inlining, but we can't do that because frontends overwrite
4816 the body. */
4817 && !cg_edge->callee->redefined_extern_inline
4818 /* During early inline pass, report only when optimization is
4819 not turned on. */
4820 && (symtab->global_info_ready
4821 || !optimize
4822 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4823 /* PR 20090218-1_0.c. Body can be provided by another module. */
4824 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4826 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4827 cgraph_inline_failed_string (reason));
4828 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4829 inform (gimple_location (stmt), "called from here");
4830 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4831 inform (DECL_SOURCE_LOCATION (cfun->decl),
4832 "called from this function");
4834 else if (opt_for_fn (fn, warn_inline)
4835 && DECL_DECLARED_INLINE_P (fn)
4836 && !DECL_NO_INLINE_WARNING_P (fn)
4837 && !DECL_IN_SYSTEM_HEADER (fn)
4838 && reason != CIF_UNSPECIFIED
4839 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4840 /* Do not warn about not inlined recursive calls. */
4841 && !cg_edge->recursive_p ()
4842 /* Avoid warnings during early inline pass. */
4843 && symtab->global_info_ready)
4845 auto_diagnostic_group d;
4846 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4847 fn, _(cgraph_inline_failed_string (reason))))
4849 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4850 inform (gimple_location (stmt), "called from here");
4851 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4852 inform (DECL_SOURCE_LOCATION (cfun->decl),
4853 "called from this function");
4856 goto egress;
4858 id->src_node = cg_edge->callee;
4860 /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4861 and redirect to the function being thunked. */
4862 if (id->src_node->thunk)
4864 cgraph_edge *edge;
4865 tree virtual_offset = NULL;
4866 profile_count count = cg_edge->count;
4867 tree op;
4868 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4869 thunk_info *info = thunk_info::get (id->src_node);
4871 cgraph_edge::remove (cg_edge);
4872 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4873 gimple_uid (stmt),
4874 profile_count::one (),
4875 profile_count::one (),
4876 true);
4877 edge->count = count;
4878 if (info->virtual_offset_p)
4879 virtual_offset = size_int (info->virtual_value);
4880 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4881 NULL);
4882 gsi_insert_before (&iter, gimple_build_assign (op,
4883 gimple_call_arg (stmt, 0)),
4884 GSI_NEW_STMT);
4885 gcc_assert (info->this_adjusting);
4886 op = thunk_adjust (&iter, op, 1, info->fixed_offset,
4887 virtual_offset, info->indirect_offset);
4889 gimple_call_set_arg (stmt, 0, op);
4890 gimple_call_set_fndecl (stmt, edge->callee->decl);
4891 update_stmt (stmt);
4892 id->src_node->remove ();
4893 successfully_inlined = expand_call_inline (bb, stmt, id, to_purge);
4894 maybe_remove_unused_call_args (cfun, stmt);
4895 /* This used to return true even though we do fail to inline in
4896 some cases. See PR98525. */
4897 goto egress;
4899 fn = cg_edge->callee->decl;
4900 cg_edge->callee->get_untransformed_body ();
4902 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4903 cg_edge->callee->verify ();
4905 /* We will be inlining this callee. */
4906 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4908 /* Update the caller's EH personality. */
4909 if (DECL_FUNCTION_PERSONALITY (fn))
4910 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4911 = DECL_FUNCTION_PERSONALITY (fn);
4913 /* Split the block before the GIMPLE_CALL. */
4914 stmt_gsi = gsi_for_stmt (stmt);
4915 gsi_prev (&stmt_gsi);
4916 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4917 bb = e->src;
4918 return_block = e->dest;
4919 remove_edge (e);
4921 /* If the GIMPLE_CALL was the last statement of BB, it may have
4922 been the source of abnormal edges. In this case, schedule
4923 the removal of dead abnormal edges. */
4924 gsi = gsi_start_bb (return_block);
4925 gsi_next (&gsi);
4926 purge_dead_abnormal_edges = gsi_end_p (gsi);
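/* I.e. RETURN_BLOCK starts with the call statement itself; if stepping
   past it reaches the end of the block, the call was the last statement
   of BB and may have been the source of abnormal edges.  */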
4928 stmt_gsi = gsi_start_bb (return_block);
4930 /* Build a block containing code to initialize the arguments, the
4931 actual inline expansion of the body, and a label for the return
4932 statements within the function to jump to. The type of the
4933 statement expression is the return type of the function call.
4934 ??? If the call does not have an associated block then we will
4935 remap all callee blocks to NULL, effectively dropping most of
4936 its debug information. This should only happen for calls to
4937 artificial decls inserted by the compiler itself. We need to
4938 either link the inlined blocks into the caller block tree or
4939 not refer to them in any way to not break GC for locations. */
4940 if (tree block = gimple_block (stmt))
4942 /* We do want to assign a non-UNKNOWN_LOCATION BLOCK_SOURCE_LOCATION
4943 to make inlined_function_outer_scope_p return true on this BLOCK. */
4944 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4945 if (loc == UNKNOWN_LOCATION)
4946 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4947 if (loc == UNKNOWN_LOCATION)
4948 loc = BUILTINS_LOCATION;
4949 id->block = make_node (BLOCK);
4950 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4951 BLOCK_SOURCE_LOCATION (id->block) = loc;
4952 prepend_lexical_block (block, id->block);
4955 /* Local declarations will be replaced by their equivalents in this map. */
4956 st = id->decl_map;
4957 id->decl_map = new hash_map<tree, tree>;
4958 dst = id->debug_map;
4959 id->debug_map = NULL;
4960 if (flag_stack_reuse != SR_NONE)
4961 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4963 /* Record the function we are about to inline. */
4964 id->src_fn = fn;
4965 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4966 id->reset_location = DECL_IGNORED_P (fn);
4967 id->call_stmt = call_stmt;
4969 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4970 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4971 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4972 simtvars_save = id->dst_simt_vars;
4973 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4974 && (simduid = bb->loop_father->simduid) != NULL_TREE
4975 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4976 && single_imm_use (simduid, &use, &simtenter_stmt)
4977 && is_gimple_call (simtenter_stmt)
4978 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4979 vec_alloc (id->dst_simt_vars, 0);
4980 else
4981 id->dst_simt_vars = NULL;
4983 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4984 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4986 /* If the src function contains an IFN_VA_ARG, then so will the dst
4987 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4988 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4989 src_properties = id->src_cfun->curr_properties & prop_mask;
4990 if (src_properties != prop_mask)
4991 dst_cfun->curr_properties &= src_properties | ~prop_mask;
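/* I.e. within PROP_MASK the destination keeps only the properties that
   the callee body also has; bits outside the mask are left untouched.  */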
4992 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4993 id->dst_node->calls_declare_variant_alt
4994 |= id->src_node->calls_declare_variant_alt;
4996 gcc_assert (!id->src_cfun->after_inlining);
4998 id->entry_bb = bb;
4999 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
5001 gimple_stmt_iterator si = gsi_last_bb (bb);
5002 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
5003 NOT_TAKEN),
5004 GSI_NEW_STMT);
5006 initialize_inlined_parameters (id, stmt, fn, bb);
5007 if (debug_nonbind_markers_p && debug_inline_points && id->block
5008 && inlined_function_outer_scope_p (id->block))
5010 gimple_stmt_iterator si = gsi_last_bb (bb);
5011 gsi_insert_after (&si, gimple_build_debug_inline_entry
5012 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
5013 GSI_NEW_STMT);
5016 if (DECL_INITIAL (fn))
5018 if (gimple_block (stmt))
5020 tree *var;
5022 prepend_lexical_block (id->block,
5023 remap_blocks (DECL_INITIAL (fn), id));
5024 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
5025 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
5026 == NULL_TREE));
5027 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
5028 otherwise, for DWARF, DW_TAG_formal_parameter entries will not be children of
5029 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
5030 under it. The parameters can then be evaluated in the debugger,
5031 but don't show up in backtraces. */
5032 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
5033 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
5035 tree v = *var;
5036 *var = TREE_CHAIN (v);
5037 TREE_CHAIN (v) = BLOCK_VARS (id->block);
5038 BLOCK_VARS (id->block) = v;
5040 else
5041 var = &TREE_CHAIN (*var);
5043 else
5044 remap_blocks_to_null (DECL_INITIAL (fn), id);
5047 /* Return statements in the function body will be replaced by jumps
5048 to the RET_LABEL. */
5049 gcc_assert (DECL_INITIAL (fn));
5050 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
5052 /* Find the LHS to which the result of this call is assigned. */
5053 return_slot = NULL;
5054 if (gimple_call_lhs (stmt))
5056 modify_dest = gimple_call_lhs (stmt);
5058 /* The function which we are inlining might not return a value,
5059 in which case we should issue a warning that the function
5060 does not return a value. In that case the optimizers will
5061 see that the variable to which the value is assigned was not
5062 initialized. We do not want to issue a warning about that
5063 uninitialized variable. */
5064 if (DECL_P (modify_dest))
5065 suppress_warning (modify_dest, OPT_Wuninitialized);
5067 if (gimple_call_return_slot_opt_p (call_stmt))
5069 return_slot = modify_dest;
5070 modify_dest = NULL;
5073 else
5074 modify_dest = NULL;
5076 /* If we are inlining a call to the C++ operator new, we don't want
5077 to use type based alias analysis on the return value. Otherwise
5078 we may get confused if the compiler sees that the inlined new
5079 function returns a pointer which was just deleted. See bug
5080 33407. */
5081 if (DECL_IS_OPERATOR_NEW_P (fn))
5083 return_slot = NULL;
5084 modify_dest = NULL;
5087 /* Declare the return variable for the function. */
5088 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
5090 /* Add local vars in this inlined callee to caller. */
5091 add_local_variables (id->src_cfun, cfun, id);
5093 if (dump_enabled_p ())
5095 char buf[128];
5096 snprintf (buf, sizeof(buf), "%4.2f",
5097 cg_edge->sreal_frequency ().to_double ());
5098 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5099 call_stmt,
5100 "Inlining %C to %C with frequency %s\n",
5101 id->src_node, id->dst_node, buf);
5102 if (dump_file && (dump_flags & TDF_DETAILS))
5104 id->src_node->dump (dump_file);
5105 id->dst_node->dump (dump_file);
5109 /* This is it. Duplicate the callee body. Assume callee is
5110 pre-gimplified. Note that we must not alter the caller
5111 function in any way before this point, as this CALL_EXPR may be
5112 a self-referential call; if we're calling ourselves, we need to
5113 duplicate our body before altering anything. */
5114 copy_body (id, bb, return_block, NULL);
5116 reset_debug_bindings (id, stmt_gsi);
5118 if (flag_stack_reuse != SR_NONE)
5119 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5120 if (!TREE_THIS_VOLATILE (p))
5122 /* The value associated with P is a local temporary only if
5123 there is no value associated with P in the debug map. */
5124 tree *varp = id->decl_map->get (p);
5125 if (varp
5126 && VAR_P (*varp)
5127 && !is_gimple_reg (*varp)
5128 && !(id->debug_map && id->debug_map->get (p)))
5130 tree clobber = build_clobber (TREE_TYPE (*varp));
5131 gimple *clobber_stmt;
5132 clobber_stmt = gimple_build_assign (*varp, clobber);
5133 gimple_set_location (clobber_stmt, gimple_location (stmt));
5134 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5138 /* Reset the escaped solution. */
5139 if (cfun->gimple_df)
5140 pt_solution_reset (&cfun->gimple_df->escaped);
5142 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
5143 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5145 size_t nargs = gimple_call_num_args (simtenter_stmt);
5146 vec<tree> *vars = id->dst_simt_vars;
5147 auto_vec<tree> newargs (nargs + vars->length ());
5148 for (size_t i = 0; i < nargs; i++)
5149 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5150 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5152 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5153 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5155 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5156 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5157 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5158 gsi_replace (&gsi, g, false);
5160 vec_free (id->dst_simt_vars);
5161 id->dst_simt_vars = simtvars_save;
5163 /* Clean up. */
5164 if (id->debug_map)
5166 delete id->debug_map;
5167 id->debug_map = dst;
5169 delete id->decl_map;
5170 id->decl_map = st;
5172 /* Unlink the call's virtual operands before replacing it. */
5173 unlink_stmt_vdef (stmt);
5174 if (gimple_vdef (stmt)
5175 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5176 release_ssa_name (gimple_vdef (stmt));
5178 /* If the inlined function returns a result that we care about,
5179 substitute the GIMPLE_CALL with an assignment of the return
5180 variable to the LHS of the call. That is, if STMT was
5181 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
5182 if (use_retvar && gimple_call_lhs (stmt))
5184 gimple *old_stmt = stmt;
5185 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5186 gimple_set_location (stmt, gimple_location (old_stmt));
5187 gsi_replace (&stmt_gsi, stmt, false);
5188 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5189 /* Append a clobber for id->retvar if easily possible. */
5190 if (flag_stack_reuse != SR_NONE
5191 && id->retvar
5192 && VAR_P (id->retvar)
5193 && id->retvar != return_slot
5194 && id->retvar != modify_dest
5195 && !TREE_THIS_VOLATILE (id->retvar)
5196 && !is_gimple_reg (id->retvar)
5197 && !stmt_ends_bb_p (stmt))
5199 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5200 gimple *clobber_stmt;
5201 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5202 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5203 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5206 else
5208 /* Handle the case of inlining a function with no return
5209 statement, which causes the return value to become undefined. */
5210 if (gimple_call_lhs (stmt)
5211 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5213 tree name = gimple_call_lhs (stmt);
5214 tree var = SSA_NAME_VAR (name);
5215 tree def = var ? ssa_default_def (cfun, var) : NULL;
5217 if (def)
5219 /* If the variable is used undefined, make this name
5220 undefined via a move. */
5221 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5222 gsi_replace (&stmt_gsi, stmt, true);
5224 else
5226 if (!var)
5228 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5229 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5231 /* Otherwise make this variable undefined. */
5232 gsi_remove (&stmt_gsi, true);
5233 set_ssa_default_def (cfun, var, name);
5234 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5237 /* Replace with a clobber for id->retvar. */
5238 else if (flag_stack_reuse != SR_NONE
5239 && id->retvar
5240 && VAR_P (id->retvar)
5241 && id->retvar != return_slot
5242 && id->retvar != modify_dest
5243 && !TREE_THIS_VOLATILE (id->retvar)
5244 && !is_gimple_reg (id->retvar))
5246 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5247 gimple *clobber_stmt;
5248 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5249 gimple_set_location (clobber_stmt, gimple_location (stmt));
5250 gsi_replace (&stmt_gsi, clobber_stmt, false);
5251 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5253 else
5254 gsi_remove (&stmt_gsi, true);
5257 if (purge_dead_abnormal_edges)
5258 bitmap_set_bit (to_purge, return_block->index);
5260 /* If the value of the new expression is ignored, that's OK. We
5261 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5262 the equivalent inlined version either. */
5263 if (is_gimple_assign (stmt))
5265 gcc_assert (gimple_assign_single_p (stmt)
5266 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5267 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5270 id->add_clobbers_to_eh_landing_pads = 0;
5272 /* Output the inlining info for this abstract function, since it has been
5273 inlined. If we don't do this now, we can lose the information about the
5274 variables in the function when the blocks get blown away as soon as we
5275 remove the cgraph node. */
5276 if (gimple_block (stmt))
5277 (*debug_hooks->outlining_inline_function) (fn);
5279 /* Update callgraph if needed. */
5280 cg_edge->callee->remove ();
5282 id->block = NULL_TREE;
5283 id->retvar = NULL_TREE;
5284 successfully_inlined = true;
5286 egress:
5287 input_location = saved_location;
5288 return successfully_inlined;
5291 /* Expand call statements reachable from STMT_P.
5292 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5293 in a MODIFY_EXPR. */
5295 static bool
5296 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5297 bitmap to_purge)
5299 gimple_stmt_iterator gsi;
5300 bool inlined = false;
5302 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5304 gimple *stmt = gsi_stmt (gsi);
5305 gsi_prev (&gsi);
5307 if (is_gimple_call (stmt)
5308 && !gimple_call_internal_p (stmt))
5309 inlined |= expand_call_inline (bb, stmt, id, to_purge);
5312 return inlined;
5316 /* Walk all basic blocks created after FIRST and try to fold every statement
5317 in the STATEMENTS pointer set. */
5319 static void
5320 fold_marked_statements (int first, hash_set<gimple *> *statements)
5322 auto_bitmap to_purge;
5324 auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
5325 auto_sbitmap visited (last_basic_block_for_fn (cfun));
5326 bitmap_clear (visited);
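/* Walk the CFG depth-first from the entry block.  Only blocks with index
   >= FIRST (i.e. blocks created by inlining) are scanned for statements to
   fold; following only the statically taken edge where one exists avoids
   folding (and warning about) statements that became unreachable through
   propagation of the inlined arguments.  */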
5328 stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5329 while (!stack.is_empty ())
5331 /* Look at the edge on the top of the stack. */
5332 edge e = stack.pop ();
5333 basic_block dest = e->dest;
5335 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
5336 || bitmap_bit_p (visited, dest->index))
5337 continue;
5339 bitmap_set_bit (visited, dest->index);
5341 if (dest->index >= first)
5342 for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
5343 !gsi_end_p (gsi); gsi_next (&gsi))
5345 if (!statements->contains (gsi_stmt (gsi)))
5346 continue;
5348 gimple *old_stmt = gsi_stmt (gsi);
5349 tree old_decl = (is_gimple_call (old_stmt)
5350 ? gimple_call_fndecl (old_stmt) : 0);
5351 if (old_decl && fndecl_built_in_p (old_decl))
5353 /* Folding builtins can create multiple instructions;
5354 we need to look at all of them. */
5355 gimple_stmt_iterator i2 = gsi;
5356 gsi_prev (&i2);
5357 if (fold_stmt (&gsi))
5359 gimple *new_stmt;
5360 /* If a builtin at the end of a bb folded into nothing,
5361 the following loop won't work. */
5362 if (gsi_end_p (gsi))
5364 cgraph_update_edges_for_call_stmt (old_stmt,
5365 old_decl, NULL);
5366 break;
5368 if (gsi_end_p (i2))
5369 i2 = gsi_start_bb (dest);
5370 else
5371 gsi_next (&i2);
5372 while (1)
5374 new_stmt = gsi_stmt (i2);
5375 update_stmt (new_stmt);
5376 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5377 new_stmt);
5379 if (new_stmt == gsi_stmt (gsi))
5381 /* It is okay to check only the very last
5382 of these statements. If it is a throwing
5383 statement nothing will change. If it isn't,
5384 this can remove EH edges. The only case where
5385 that wouldn't be correct is when some intermediate
5386 stmts throw but the last one doesn't. That would
5387 mean we'd have to split the block, which we can't
5388 do here, and we'd lose anyway. And as builtins
5389 probably never throw, this all
5390 is moot anyway. */
5391 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5392 new_stmt))
5393 bitmap_set_bit (to_purge, dest->index);
5394 break;
5396 gsi_next (&i2);
5400 else if (fold_stmt (&gsi))
5402 /* Re-read the statement from GSI as fold_stmt() may
5403 have changed it. */
5404 gimple *new_stmt = gsi_stmt (gsi);
5405 update_stmt (new_stmt);
5407 if (is_gimple_call (old_stmt)
5408 || is_gimple_call (new_stmt))
5409 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5410 new_stmt);
5412 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5413 bitmap_set_bit (to_purge, dest->index);
5417 if (EDGE_COUNT (dest->succs) > 0)
5419 /* Avoid warnings emitted from folding statements that
5420 became unreachable because of inlined function parameter
5421 propagation. */
5422 e = find_taken_edge (dest, NULL_TREE);
5423 if (e)
5424 stack.quick_push (e);
5425 else
5427 edge_iterator ei;
5428 FOR_EACH_EDGE (e, ei, dest->succs)
5429 stack.safe_push (e);
5434 gimple_purge_all_dead_eh_edges (to_purge);
5437 /* Expand calls to inline functions in the body of FN. */
5439 unsigned int
5440 optimize_inline_calls (tree fn)
5442 copy_body_data id;
5443 basic_block bb;
5444 int last = n_basic_blocks_for_fn (cfun);
5445 bool inlined_p = false;
5447 /* Clear out ID. */
5448 memset (&id, 0, sizeof (id));
5450 id.src_node = id.dst_node = cgraph_node::get (fn);
5451 gcc_assert (id.dst_node->definition);
5452 id.dst_fn = fn;
5453 /* Or any functions that aren't finished yet. */
5454 if (current_function_decl)
5455 id.dst_fn = current_function_decl;
5457 id.copy_decl = copy_decl_maybe_to_var;
5458 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5459 id.transform_new_cfg = false;
5460 id.transform_return_to_modify = true;
5461 id.transform_parameter = true;
5462 id.transform_lang_insert_block = NULL;
5463 id.statements_to_fold = new hash_set<gimple *>;
5465 push_gimplify_context ();
5467 /* We make no attempts to keep dominance info up-to-date. */
5468 free_dominance_info (CDI_DOMINATORS);
5469 free_dominance_info (CDI_POST_DOMINATORS);
5471 /* Register specific gimple functions. */
5472 gimple_register_cfg_hooks ();
5474 /* Reach the trees by walking over the CFG, and note the
5475 enclosing basic-blocks in the call edges. */
5476 /* We walk the blocks going forward, because inlined function bodies
5477 will split id->current_basic_block, and the new blocks will
5478 follow it; we'll trudge through them, processing their CALL_EXPRs
5479 along the way. */
5480 auto_bitmap to_purge;
5481 FOR_EACH_BB_FN (bb, cfun)
5482 inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5484 pop_gimplify_context (NULL);
5486 if (flag_checking)
5488 struct cgraph_edge *e;
5490 id.dst_node->verify ();
5492 /* Double check that we inlined everything we are supposed to inline. */
5493 for (e = id.dst_node->callees; e; e = e->next_callee)
5494 gcc_assert (e->inline_failed);
5497 /* If we didn't inline into the function there is nothing to do. */
5498 if (!inlined_p)
5500 delete id.statements_to_fold;
5501 return 0;
5504 /* Fold queued statements. */
5505 update_max_bb_count ();
5506 fold_marked_statements (last, id.statements_to_fold);
5507 delete id.statements_to_fold;
5509 /* Finally purge EH and abnormal edges from the call stmts we inlined.
5510 We need to do this after fold_marked_statements since that may walk
5511 the SSA use-def chain. */
5512 unsigned i;
5513 bitmap_iterator bi;
5514 EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5516 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5517 if (bb)
5519 gimple_purge_dead_eh_edges (bb);
5520 gimple_purge_dead_abnormal_call_edges (bb);
5524 gcc_assert (!id.debug_stmts.exists ());
5526 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5527 number_blocks (fn);
5529 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5530 id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();
5532 if (flag_checking)
5533 id.dst_node->verify ();
5535 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5536 not possible yet - the IPA passes might make various functions not
5537 throw and they don't proactively update local EH info. This is
5538 done later in the fixup_cfg pass, which also executes the verification. */
5539 return (TODO_update_ssa
5540 | TODO_cleanup_cfg
5541 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5542 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5543 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5544 ? TODO_rebuild_frequencies : 0));
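/* A note for readers: the TODO flags returned above are not acted upon
   here; the caller (typically the inline transform hook) hands them back
   to the pass manager, which then performs the requested SSA update and
   CFG cleanup.  */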
5547 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5549 tree
5550 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5552 enum tree_code code = TREE_CODE (*tp);
5553 enum tree_code_class cl = TREE_CODE_CLASS (code);
5555 /* We make copies of most nodes. */
5556 if (IS_EXPR_CODE_CLASS (cl)
5557 || code == TREE_LIST
5558 || code == TREE_VEC
5559 || code == TYPE_DECL
5560 || code == OMP_CLAUSE)
5562 /* Because the chain gets clobbered when we make a copy, we save it
5563 here. */
5564 tree chain = NULL_TREE, new_tree;
5566 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5567 chain = TREE_CHAIN (*tp);
5569 /* Copy the node. */
5570 new_tree = copy_node (*tp);
5572 *tp = new_tree;
5574 /* Now, restore the chain, if appropriate. That will cause
5575 walk_tree to walk into the chain as well. */
5576 if (code == PARM_DECL
5577 || code == TREE_LIST
5578 || code == OMP_CLAUSE)
5579 TREE_CHAIN (*tp) = chain;
5581 /* For now, we don't update BLOCKs when we make copies. So, we
5582 have to nullify all BIND_EXPRs. */
5583 if (TREE_CODE (*tp) == BIND_EXPR)
5584 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5586 else if (code == CONSTRUCTOR)
5588 /* CONSTRUCTOR nodes need special handling because
5589 we need to duplicate the vector of elements. */
5590 tree new_tree;
5592 new_tree = copy_node (*tp);
5593 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5594 *tp = new_tree;
5596 else if (code == STATEMENT_LIST)
5597 /* We used to just abort on STATEMENT_LIST, but we can run into them
5598 with statement-expressions (c++/40975). */
5599 copy_statement_list (tp);
5600 else if (TREE_CODE_CLASS (code) == tcc_type)
5601 *walk_subtrees = 0;
5602 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5603 *walk_subtrees = 0;
5604 else if (TREE_CODE_CLASS (code) == tcc_constant)
5605 *walk_subtrees = 0;
5606 return NULL_TREE;
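/* Illustrative sketch (not used in this file): copy_tree_r is meant to be
   passed to walk_tree to deep-copy an expression, e.g.

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);

   Declarations, types and constants are still shared by the copy, since
   *WALK_SUBTREES is cleared for those classes above.  */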
5609 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5610 information indicating to what new SAVE_EXPR this one should be mapped,
5611 use that one. Otherwise, create a new node and enter it in ST.
5612 WALK_SUBTREES is cleared when the SAVE_EXPR had already been remapped. */
5614 static void
5615 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5617 tree *n;
5618 tree t;
5620 /* See if we already encountered this SAVE_EXPR. */
5621 n = st->get (*tp);
5623 /* If we didn't already remap this SAVE_EXPR, do so now. */
5624 if (!n)
5626 t = copy_node (*tp);
5628 /* Remember this SAVE_EXPR. */
5629 st->put (*tp, t);
5630 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5631 st->put (t, t);
5633 else
5635 /* We've already walked into this SAVE_EXPR; don't do it again. */
5636 *walk_subtrees = 0;
5637 t = *n;
5640 /* Replace this SAVE_EXPR with the copy. */
5641 *tp = t;
5644 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5645 label, copies the declaration and enters it in the decl map of the
5646 copy_body_data pointed to by WI->info. */
5648 static tree
5649 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5650 bool *handled_ops_p ATTRIBUTE_UNUSED,
5651 struct walk_stmt_info *wi)
5653 copy_body_data *id = (copy_body_data *) wi->info;
5654 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5656 if (stmt)
5658 tree decl = gimple_label_label (stmt);
5660 /* Copy the decl and remember the copy. */
5661 insert_decl_map (id, decl, id->copy_decl (decl, id));
5664 return NULL_TREE;
5667 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5668 struct walk_stmt_info *wi);
5670 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5671 Using the decl map in the copy_body_data pointed to by WI->info,
5672 remaps all local declarations to appropriate replacements in gimple
5673 operands. */
5675 static tree
5676 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5678 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5679 copy_body_data *id = (copy_body_data *) wi->info;
5680 hash_map<tree, tree> *st = id->decl_map;
5681 tree *n;
5682 tree expr = *tp;
5684 /* For recursive invocations this is no longer the LHS itself. */
5685 bool is_lhs = wi->is_lhs;
5686 wi->is_lhs = false;
5688 if (TREE_CODE (expr) == SSA_NAME)
5690 *tp = remap_ssa_name (*tp, id);
5691 *walk_subtrees = 0;
5692 if (is_lhs)
5693 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5695 /* Only a local declaration (variable or label). */
5696 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5697 || TREE_CODE (expr) == LABEL_DECL)
5699 /* Lookup the declaration. */
5700 n = st->get (expr);
5702 /* If it's there, remap it. */
5703 if (n)
5704 *tp = *n;
5705 *walk_subtrees = 0;
5707 else if (TREE_CODE (expr) == STATEMENT_LIST
5708 || TREE_CODE (expr) == BIND_EXPR
5709 || TREE_CODE (expr) == SAVE_EXPR)
5710 gcc_unreachable ();
5711 else if (TREE_CODE (expr) == TARGET_EXPR)
5713 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5714 It's OK for this to happen if it was part of a subtree that
5715 isn't immediately expanded, such as operand 2 of another
5716 TARGET_EXPR. */
5717 if (!TREE_OPERAND (expr, 1))
5719 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5720 TREE_OPERAND (expr, 3) = NULL_TREE;
5723 else if (TREE_CODE (expr) == OMP_CLAUSE)
5725 /* Before the omplower pass completes, some OMP clauses can contain
5726 sequences that are neither copied by gimple_seq_copy nor walked by
5727 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5728 in those situations, we have to copy and process them explicitly. */
5730 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5732 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5733 seq = duplicate_remap_omp_clause_seq (seq, wi);
5734 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5736 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5738 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5739 seq = duplicate_remap_omp_clause_seq (seq, wi);
5740 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5742 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5744 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5745 seq = duplicate_remap_omp_clause_seq (seq, wi);
5746 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5747 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5748 seq = duplicate_remap_omp_clause_seq (seq, wi);
5749 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5753 /* Keep iterating. */
5754 return NULL_TREE;
5758 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5759 Using the decl map in the copy_body_data pointed to by WI->info,
5760 remaps all local declarations to appropriate replacements in gimple
5761 statements. */
5763 static tree
5764 replace_locals_stmt (gimple_stmt_iterator *gsip,
5765 bool *handled_ops_p ATTRIBUTE_UNUSED,
5766 struct walk_stmt_info *wi)
5768 copy_body_data *id = (copy_body_data *) wi->info;
5769 gimple *gs = gsi_stmt (*gsip);
5771 if (gbind *stmt = dyn_cast <gbind *> (gs))
5773 tree block = gimple_bind_block (stmt);
5775 if (block)
5777 remap_block (&block, id);
5778 gimple_bind_set_block (stmt, block);
5781 /* This will remap a lot of the same decls again, but this should be
5782 harmless. */
5783 if (gimple_bind_vars (stmt))
5785 tree old_var, decls = gimple_bind_vars (stmt);
5787 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5788 if (!can_be_nonlocal (old_var, id)
5789 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5790 remap_decl (old_var, id);
5792 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5793 id->prevent_decl_creation_for_types = true;
5794 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5795 id->prevent_decl_creation_for_types = false;
5799 /* Keep iterating. */
5800 return NULL_TREE;
5803 /* Create a copy of SEQ and remap all decls in it. */
5805 static gimple_seq
5806 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5808 if (!seq)
5809 return NULL;
5811 /* Any labels in OMP sequences can only be referred to from within the
5812 sequence itself, so we can do both the label marking and the remapping here. */
5813 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5814 gimple_seq copy = gimple_seq_copy (seq);
5815 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5816 return copy;
5819 /* Copies everything in SEQ and replaces variables and labels local to
5820 current_function_decl. */
5822 gimple_seq
5823 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5825 copy_body_data id;
5826 struct walk_stmt_info wi;
5827 gimple_seq copy;
5830 /* There's nothing to do for an empty sequence. */
5830 if (seq == NULL)
5831 return seq;
5833 /* Set up ID. */
5834 memset (&id, 0, sizeof (id));
5835 id.src_fn = current_function_decl;
5836 id.dst_fn = current_function_decl;
5837 id.src_cfun = cfun;
5838 id.decl_map = new hash_map<tree, tree>;
5839 id.debug_map = NULL;
5841 id.copy_decl = copy_decl_no_change;
5842 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5843 id.transform_new_cfg = false;
5844 id.transform_return_to_modify = false;
5845 id.transform_parameter = false;
5846 id.transform_lang_insert_block = NULL;
5848 /* Walk the tree once to find local labels. */
5849 memset (&wi, 0, sizeof (wi));
5850 hash_set<tree> visited;
5851 wi.info = &id;
5852 wi.pset = &visited;
5853 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5855 copy = gimple_seq_copy (seq);
5857 /* Walk the copy, remapping decls. */
5858 memset (&wi, 0, sizeof (wi));
5859 wi.info = &id;
5860 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5862 /* Clean up. */
5863 delete id.decl_map;
5864 if (id.debug_map)
5865 delete id.debug_map;
5866 if (id.dependence_map)
5868 delete id.dependence_map;
5869 id.dependence_map = NULL;
5872 return copy;
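/* Illustrative sketch (hypothetical caller): given a gimple_seq SEQ built
   in the current function,

     gimple_seq dup = copy_gimple_seq_and_replace_locals (seq);

   yields a duplicate whose local decls, labels and SSA names have been
   remapped to fresh copies, leaving SEQ itself untouched.  */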
5876 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5878 static tree
5879 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5881 if (*tp == data)
5882 return (tree) data;
5883 else
5884 return NULL;
5887 DEBUG_FUNCTION bool
5888 debug_find_tree (tree top, tree search)
5890 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5894 /* Declare the variables created by the inliner. Add all the variables in
5895 VARS to BLOCK. */
5897 static void
5898 declare_inline_vars (tree block, tree vars)
5900 tree t;
5901 for (t = vars; t; t = DECL_CHAIN (t))
5903 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5904 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5905 add_local_decl (cfun, t);
5908 if (block)
5909 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5912 /* Finish creating COPY, the duplicate of DECL. The original DECL lives in
5913 ID->src_fn and the copy will become part of ID->dst_fn: fix up its debug
5914 info, RTL, mode and DECL_CONTEXT accordingly and return COPY. */
5916 tree
5917 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5919 /* Don't generate debug information for the copy if we wouldn't have
5920 generated it for the original either. */
5921 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5922 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5924 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5925 declaration inspired this copy. */
5926 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5928 /* The new variable/label has no RTL, yet. */
5929 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5930 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5931 SET_DECL_RTL (copy, 0);
5932 /* For vector typed decls make sure to update DECL_MODE according
5933 to the new function context. */
5934 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5935 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5937 /* These args would always appear unused, if not for this. */
5938 TREE_USED (copy) = 1;
5940 /* Set the context for the new declaration. */
5941 if (!DECL_CONTEXT (decl))
5942 /* Globals stay global. */
5944 else if (DECL_CONTEXT (decl) != id->src_fn)
5945 /* Things that weren't in the scope of the function we're inlining
5946 from aren't in the scope we're inlining to, either. */
5948 else if (TREE_STATIC (decl))
5949 /* Function-scoped static variables should stay in the original
5950 function. */
5952 else
5954 /* Ordinary automatic local variables are now in the scope of the
5955 new function. */
5956 DECL_CONTEXT (copy) = id->dst_fn;
5957 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5959 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5960 DECL_ATTRIBUTES (copy)
5961 = tree_cons (get_identifier ("omp simt private"), NULL,
5962 DECL_ATTRIBUTES (copy));
5963 id->dst_simt_vars->safe_push (copy);
5967 return copy;
5970 /* Create a new VAR_DECL that is identical in all respects to DECL, which
5971 can be either a PARM_DECL or a RESULT_DECL. The original
5972 DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
5974 tree
5975 copy_decl_to_var (tree decl, copy_body_data *id)
5977 tree copy, type;
5979 gcc_assert (TREE_CODE (decl) == PARM_DECL
5980 || TREE_CODE (decl) == RESULT_DECL);
5982 type = TREE_TYPE (decl);
5984 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5985 VAR_DECL, DECL_NAME (decl), type);
5986 if (DECL_PT_UID_SET_P (decl))
5987 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5988 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5989 TREE_READONLY (copy) = TREE_READONLY (decl);
5990 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5991 DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
5992 DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
5994 return copy_decl_for_dup_finish (id, decl, copy);
5997 /* Like copy_decl_to_var, but create a return slot object instead of a
5998 pointer variable for return by invisible reference. */
6000 static tree
6001 copy_result_decl_to_var (tree decl, copy_body_data *id)
6003 tree copy, type;
6005 gcc_assert (TREE_CODE (decl) == PARM_DECL
6006 || TREE_CODE (decl) == RESULT_DECL);
6008 type = TREE_TYPE (decl);
6009 if (DECL_BY_REFERENCE (decl))
6010 type = TREE_TYPE (type);
6012 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
6013 VAR_DECL, DECL_NAME (decl), type);
6014 if (DECL_PT_UID_SET_P (decl))
6015 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
6016 TREE_READONLY (copy) = TREE_READONLY (decl);
6017 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
6018 if (!DECL_BY_REFERENCE (decl))
6020 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
6021 DECL_NOT_GIMPLE_REG_P (copy)
6022 = (DECL_NOT_GIMPLE_REG_P (decl)
6023 /* RESULT_DECLs are treated specially by needs_to_live_in_memory;
6024 mirror that to the created VAR_DECL. */
6025 || (TREE_CODE (decl) == RESULT_DECL
6026 && aggregate_value_p (decl, id->src_fn)));
6029 return copy_decl_for_dup_finish (id, decl, copy);
6032 tree
6033 copy_decl_no_change (tree decl, copy_body_data *id)
6035 tree copy;
6037 copy = copy_node (decl);
6039 /* The COPY is not abstract; it will be generated in DST_FN. */
6040 DECL_ABSTRACT_P (copy) = false;
6041 lang_hooks.dup_lang_specific_decl (copy);
6043 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
6044 been taken; it's for internal bookkeeping in expand_goto_internal. */
6045 if (TREE_CODE (copy) == LABEL_DECL)
6047 TREE_ADDRESSABLE (copy) = 0;
6048 LABEL_DECL_UID (copy) = -1;
6051 return copy_decl_for_dup_finish (id, decl, copy);
6054 static tree
6055 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
6057 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
6058 return copy_decl_to_var (decl, id);
6059 else
6060 return copy_decl_no_change (decl, id);
6063 /* Return a copy of the function's argument tree without any modifications. */
6065 static tree
6066 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
6068 tree arg, *parg;
6069 tree new_parm = NULL;
6071 parg = &new_parm;
6072 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
6074 tree new_tree = remap_decl (arg, id);
6075 if (TREE_CODE (new_tree) != PARM_DECL)
6076 new_tree = id->copy_decl (arg, id);
6077 lang_hooks.dup_lang_specific_decl (new_tree);
6078 *parg = new_tree;
6079 parg = &DECL_CHAIN (new_tree);
6081 return new_parm;
6084 /* Return a copy of the function's static chain. */
6085 static tree
6086 copy_static_chain (tree static_chain, copy_body_data * id)
6088 tree *chain_copy, *pvar;
6090 chain_copy = &static_chain;
6091 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
6093 tree new_tree = remap_decl (*pvar, id);
6094 lang_hooks.dup_lang_specific_decl (new_tree);
6095 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
6096 *pvar = new_tree;
6098 return static_chain;
6101 /* Return true if the function is allowed to be versioned.
6102 This is a guard for the versioning functionality. */
6104 bool
6105 tree_versionable_function_p (tree fndecl)
6107 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
6108 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
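/* For example, a function declared as

     int keep_me (int) __attribute__ ((noclone));

   is rejected here, as is any function for which copy_forbidden reports
   a reason (e.g. one that receives a non-local goto).  */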
6111 /* Update clone info after duplication. */
6113 static void
6114 update_clone_info (copy_body_data * id)
6116 struct cgraph_node *this_node = id->dst_node;
6117 if (!this_node->clones)
6118 return;
6119 for (cgraph_node *node = this_node->clones; node != this_node;)
6121 /* First update replace maps to match the new body. */
6122 clone_info *info = clone_info::get (node);
6123 if (info && info->tree_map)
6125 unsigned int i;
6126 for (i = 0; i < vec_safe_length (info->tree_map); i++)
6128 struct ipa_replace_map *replace_info;
6129 replace_info = (*info->tree_map)[i];
6130 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6134 if (node->clones)
6135 node = node->clones;
6136 else if (node->next_sibling_clone)
6137 node = node->next_sibling_clone;
6138 else
6140 while (node != id->dst_node && !node->next_sibling_clone)
6141 node = node->clone_of;
6142 if (node != id->dst_node)
6143 node = node->next_sibling_clone;
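/* The loop above performs a preorder walk of the clone tree rooted at
   ID->dst_node: descend into a node's clones first, then move to the next
   sibling, and otherwise climb back up via clone_of until an unvisited
   sibling is found.  */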
6148 /* Create a copy of a function's tree.
6149 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6150 of the original function and the new copied function
6151 respectively. In case we want to replace a DECL
6152 tree with another tree while duplicating the function's
6153 body, TREE_MAP represents the mapping between these
6154 trees. If UPDATE_CLONES is set, the call_stmt fields
6155 of edges of clones of the function will be updated.
6157 If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype (i.e.
6158 the function parameters and return value) should be modified.
6159 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
6160 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
6162 void
6163 tree_function_versioning (tree old_decl, tree new_decl,
6164 vec<ipa_replace_map *, va_gc> *tree_map,
6165 ipa_param_adjustments *param_adjustments,
6166 bool update_clones, bitmap blocks_to_copy,
6167 basic_block new_entry)
6169 struct cgraph_node *old_version_node;
6170 struct cgraph_node *new_version_node;
6171 copy_body_data id;
6172 tree p;
6173 unsigned i;
6174 struct ipa_replace_map *replace_info;
6175 basic_block old_entry_block, bb;
6176 auto_vec<gimple *, 10> init_stmts;
6177 tree vars = NULL_TREE;
6179 /* We can get called recursively from expand_call_inline via clone
6180 materialization. While expand_call_inline maintains input_location
6181 we must not let it leak into the materialized clone. */
6182 location_t saved_location = input_location;
6183 input_location = UNKNOWN_LOCATION;
6185 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6186 && TREE_CODE (new_decl) == FUNCTION_DECL);
6187 DECL_POSSIBLY_INLINED (old_decl) = 1;
6189 old_version_node = cgraph_node::get (old_decl);
6190 gcc_checking_assert (old_version_node);
6191 new_version_node = cgraph_node::get (new_decl);
6192 gcc_checking_assert (new_version_node);
6194 /* Copy over debug args. */
6195 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6197 vec<tree, va_gc> **new_debug_args, **old_debug_args;
6198 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6199 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6200 old_debug_args = decl_debug_args_lookup (old_decl);
6201 if (old_debug_args)
6203 new_debug_args = decl_debug_args_insert (new_decl);
6204 *new_debug_args = vec_safe_copy (*old_debug_args);
6208 /* Output the inlining info for this abstract function, since it has been
6209 inlined. If we don't do this now, we can lose the information about the
6210 variables in the function when the blocks get blown away as soon as we
6211 remove the cgraph node. */
6212 (*debug_hooks->outlining_inline_function) (old_decl);
6214 DECL_ARTIFICIAL (new_decl) = 1;
6215 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6216 if (DECL_ORIGIN (old_decl) == old_decl)
6217 old_version_node->used_as_abstract_origin = true;
6218 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6220 /* Prepare the data structures for the tree copy. */
6221 memset (&id, 0, sizeof (id));
6223 /* Collect statements that will need to be folded once the body is copied. */
6224 id.statements_to_fold = new hash_set<gimple *>;
6226 id.decl_map = new hash_map<tree, tree>;
6227 id.debug_map = NULL;
6228 id.src_fn = old_decl;
6229 id.dst_fn = new_decl;
6230 id.src_node = old_version_node;
6231 id.dst_node = new_version_node;
6232 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6233 id.blocks_to_copy = blocks_to_copy;
6235 id.copy_decl = copy_decl_no_change;
6236 id.transform_call_graph_edges
6237 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6238 id.transform_new_cfg = true;
6239 id.transform_return_to_modify = false;
6240 id.transform_parameter = false;
6241 id.transform_lang_insert_block = NULL;
6243 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
6244 (DECL_STRUCT_FUNCTION (old_decl));
6245 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6246 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6247 initialize_cfun (new_decl, old_decl,
6248 new_entry ? new_entry->count : old_entry_block->count);
6249 new_version_node->calls_declare_variant_alt
6250 = old_version_node->calls_declare_variant_alt;
6251 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6252 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6253 = id.src_cfun->gimple_df->ipa_pta;
6255 /* Copy the function's static chain. */
6256 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6257 if (p)
6258 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6259 = copy_static_chain (p, &id);
6261 auto_vec<int, 16> new_param_indices;
6262 clone_info *info = clone_info::get (old_version_node);
6263 ipa_param_adjustments *old_param_adjustments
6264 = info ? info->param_adjustments : NULL;
6265 if (old_param_adjustments)
6266 old_param_adjustments->get_updated_indices (&new_param_indices);
6268 /* If there's a tree_map, prepare for substitution. */
6269 if (tree_map)
6270 for (i = 0; i < tree_map->length (); i++)
6272 gimple *init;
6273 replace_info = (*tree_map)[i];
6275 int p = replace_info->parm_num;
6276 if (old_param_adjustments)
6277 p = new_param_indices[p];
6279 tree parm;
6280 for (parm = DECL_ARGUMENTS (old_decl); p;
6281 parm = DECL_CHAIN (parm))
6282 p--;
6283 gcc_assert (parm);
6284 init = setup_one_parameter (&id, parm, replace_info->new_tree,
6285 id.src_fn, NULL, &vars);
6286 if (init)
6287 init_stmts.safe_push (init);
6290 ipa_param_body_adjustments *param_body_adjs = NULL;
6291 if (param_adjustments)
6293 param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6294 new_decl, old_decl,
6295 &id, &vars, tree_map);
6296 id.param_body_adjs = param_body_adjs;
6297 DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6299 else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6300 DECL_ARGUMENTS (new_decl)
6301 = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6303 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6304 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6306 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6308 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6309 /* Add local vars. */
6310 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6312 if (DECL_RESULT (old_decl) == NULL_TREE)
6314 else if (param_adjustments && param_adjustments->m_skip_return
6315 && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6317 tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6318 &id);
6319 declare_inline_vars (NULL, resdecl_repl);
6320 if (DECL_BY_REFERENCE (DECL_RESULT (old_decl)))
6321 resdecl_repl = build_fold_addr_expr (resdecl_repl);
6322 insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6324 DECL_RESULT (new_decl)
6325 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6326 RESULT_DECL, NULL_TREE, void_type_node);
6327 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6328 DECL_IS_MALLOC (new_decl) = false;
6329 cfun->returns_struct = 0;
6330 cfun->returns_pcc_struct = 0;
6332 else
6334 tree old_name;
6335 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6336 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6337 if (gimple_in_ssa_p (id.src_cfun)
6338 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6339 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6341 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6342 insert_decl_map (&id, old_name, new_name);
6343 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6344 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6348 /* Set up the destination function's loop tree. */
6349 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6351 cfun->curr_properties &= ~PROP_loops;
6352 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6353 cfun->curr_properties |= PROP_loops;
6356 /* Copy the function's body. */
6357 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6358 new_entry);
6360 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6361 number_blocks (new_decl);
6363 /* We want to create the BB unconditionally, so that the addition of
6364 debug stmts doesn't affect BB count, which may in the end cause
6365 codegen differences. */
6366 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6367 while (init_stmts.length ())
6368 insert_init_stmt (&id, bb, init_stmts.pop ());
6369 update_clone_info (&id);
6371 /* Remap the nonlocal_goto_save_area, if any. */
6372 if (cfun->nonlocal_goto_save_area)
6374 struct walk_stmt_info wi;
6376 memset (&wi, 0, sizeof (wi));
6377 wi.info = &id;
6378 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6381 /* Clean up. */
6382 delete id.decl_map;
6383 if (id.debug_map)
6384 delete id.debug_map;
6385 free_dominance_info (CDI_DOMINATORS);
6386 free_dominance_info (CDI_POST_DOMINATORS);
6388 update_max_bb_count ();
6389 fold_marked_statements (0, id.statements_to_fold);
6390 delete id.statements_to_fold;
6391 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6392 if (id.dst_node->definition)
6393 cgraph_edge::rebuild_references ();
6394 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6396 calculate_dominance_info (CDI_DOMINATORS);
6397 fix_loop_structure (NULL);
6399 update_ssa (TODO_update_ssa);
6401 /* After partial cloning we need to rescale frequencies, so they are
6402 within proper range in the cloned function. */
6403 if (new_entry)
6405 struct cgraph_edge *e;
6406 rebuild_frequencies ();
6408 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6409 for (e = new_version_node->callees; e; e = e->next_callee)
6411 basic_block bb = gimple_bb (e->call_stmt);
6412 e->count = bb->count;
6414 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6416 basic_block bb = gimple_bb (e->call_stmt);
6417 e->count = bb->count;
6421 if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6423 vec<tree, va_gc> **debug_args = NULL;
6424 unsigned int len = 0;
6425 unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6427 for (i = 0; i < reset_len; i++)
6429 tree parm = param_body_adjs->m_reset_debug_decls[i];
6430 gcc_assert (is_gimple_reg (parm));
6431 tree ddecl;
6433 if (debug_args == NULL)
6435 debug_args = decl_debug_args_insert (new_decl);
6436 len = vec_safe_length (*debug_args);
6438 ddecl = make_node (DEBUG_EXPR_DECL);
6439 DECL_ARTIFICIAL (ddecl) = 1;
6440 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6441 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6442 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6443 vec_safe_push (*debug_args, ddecl);
6445 if (debug_args != NULL)
6447 /* On the callee side, add
6448 DEBUG D#Y s=> parm
6449 DEBUG var => D#Y
6450 stmts to the first bb where var is a VAR_DECL created for the
6451 optimized away parameter in DECL_INITIAL block. This hints
6452 in the debug info that var (whose DECL_ORIGIN is the parm
6453 PARM_DECL) is optimized away, but could be looked up at the
6454 call site as value of D#X there. */
6455 tree vexpr;
6456 gimple_stmt_iterator cgsi
6457 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6458 gimple *def_temp;
6459 tree var = vars;
6460 i = vec_safe_length (*debug_args);
6463 i -= 2;
6464 while (var != NULL_TREE
6465 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6466 var = TREE_CHAIN (var);
6467 if (var == NULL_TREE)
6468 break;
6469 vexpr = make_node (DEBUG_EXPR_DECL);
6470 tree parm = (**debug_args)[i];
6471 DECL_ARTIFICIAL (vexpr) = 1;
6472 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6473 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6474 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6475 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6476 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6477 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6479 while (i > len);
6482 delete param_body_adjs;
6483 free_dominance_info (CDI_DOMINATORS);
6484 free_dominance_info (CDI_POST_DOMINATORS);
6486 gcc_assert (!id.debug_stmts.exists ());
6487 pop_cfun ();
6488 input_location = saved_location;
6489 return;
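/* A rough summary of the phases above: set up the copy_body_data and the
   new cfun, remap the static chain, arguments and result, copy the body,
   insert any parameter-initialization statements, fold the statements
   that were marked for it, remove now-unreachable blocks, and finally
   update SSA, the loop structure and the profile.  */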
6492 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6493 the callee and return the inlined body on success. */
6495 tree
6496 maybe_inline_call_in_expr (tree exp)
6498 tree fn = get_callee_fndecl (exp);
6500 /* We can only try to inline "const" functions. */
6501 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6503 call_expr_arg_iterator iter;
6504 copy_body_data id;
6505 tree param, arg, t;
6506 hash_map<tree, tree> decl_map;
6508 /* Remap the parameters. */
6509 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6510 param;
6511 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6512 decl_map.put (param, arg);
6514 memset (&id, 0, sizeof (id));
6515 id.src_fn = fn;
6516 id.dst_fn = current_function_decl;
6517 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6518 id.decl_map = &decl_map;
6520 id.copy_decl = copy_decl_no_change;
6521 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6522 id.transform_new_cfg = false;
6523 id.transform_return_to_modify = true;
6524 id.transform_parameter = true;
6525 id.transform_lang_insert_block = NULL;
6527 /* Make sure not to unshare trees behind the front-end's back
6528 since front-end specific mechanisms may rely on sharing. */
6529 id.regimplify = false;
6530 id.do_not_unshare = true;
6532 /* We're not inside any EH region. */
6533 id.eh_lp_nr = 0;
6535 t = copy_tree_body (&id);
6537 /* We can only return something suitable for use in a GENERIC
6538 expression tree. */
6539 if (TREE_CODE (t) == MODIFY_EXPR)
6540 return TREE_OPERAND (t, 1);
6543 return NULL_TREE;
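/* Illustrative example (hypothetical): for a "const" function whose saved
   body is a single return of an expression, the copied body reduces to one
   MODIFY_EXPR storing the return value; its operand 1, with the parameters
   already replaced by the call's arguments via the decl_map set up above,
   is what the caller substitutes for the CALL_EXPR.  */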
6546 /* Duplicate a type, fields and all. */
6548 tree
6549 build_duplicate_type (tree type)
6551 struct copy_body_data id;
6553 memset (&id, 0, sizeof (id));
6554 id.src_fn = current_function_decl;
6555 id.dst_fn = current_function_decl;
6556 id.src_cfun = cfun;
6557 id.decl_map = new hash_map<tree, tree>;
6558 id.debug_map = NULL;
6559 id.copy_decl = copy_decl_no_change;
6561 type = remap_type_1 (type, &id);
6563 delete id.decl_map;
6564 if (id.debug_map)
6565 delete id.debug_map;
6567 TYPE_CANONICAL (type) = type;
6569 return type;
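/* Note: setting TYPE_CANONICAL to the copy itself above makes the duplicate
   its own canonical type, so canonical-type comparisons treat it as distinct
   from the original.  */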
6572 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6573 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6574 evaluation. */
6576 tree
6577 copy_fn (tree fn, tree& parms, tree& result)
6579 copy_body_data id;
6580 tree param;
6581 hash_map<tree, tree> decl_map;
6583 tree *p = &parms;
6584 *p = NULL_TREE;
6586 memset (&id, 0, sizeof (id));
6587 id.src_fn = fn;
6588 id.dst_fn = current_function_decl;
6589 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6590 id.decl_map = &decl_map;
6592 id.copy_decl = copy_decl_no_change;
6593 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6594 id.transform_new_cfg = false;
6595 id.transform_return_to_modify = false;
6596 id.transform_parameter = true;
6597 id.transform_lang_insert_block = NULL;
6599 /* Make sure not to unshare trees behind the front-end's back
6600 since front-end specific mechanisms may rely on sharing. */
6601 id.regimplify = false;
6602 id.do_not_unshare = true;
6603 id.do_not_fold = true;
6605 /* We're not inside any EH region. */
6606 id.eh_lp_nr = 0;
6608 /* Remap the parameters and result and return them to the caller. */
6609 for (param = DECL_ARGUMENTS (fn);
6610 param;
6611 param = DECL_CHAIN (param))
6613 *p = remap_decl (param, &id);
6614 p = &DECL_CHAIN (*p);
6617 if (DECL_RESULT (fn))
6618 result = remap_decl (DECL_RESULT (fn), &id);
6619 else
6620 result = NULL_TREE;
6622 return copy_tree_body (&id);