gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2020 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "stringpool.h"
60 #include "attribs.h"
61 #include "sreal.h"
62 #include "tree-cfgcleanup.h"
63 #include "tree-ssa-live.h"
65 /* I'm not really happy about this, but we need to handle gimple and
66 non-gimple trees. */
68 /* Inlining, Cloning, Versioning, Parallelization
70 Inlining: a function body is duplicated, but the PARM_DECLs are
71 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
72 MODIFY_EXPRs that store to a dedicated returned-value variable.
73 The duplicated eh_region info of the copy will later be appended
74 to the info for the caller; the eh_region info in copied throwing
75 statements and RESX statements is adjusted accordingly.
77 Cloning: (only in C++) We have one body for a con/de/structor, and
78 multiple function decls, each with a unique parameter list.
79 Duplicate the body, using the given splay tree; some parameters
80 will become constants (like 0 or 1).
82 Versioning: a function body is duplicated, and the result is a new
83 function rather than blocks inserted into an existing function as
84 with inlining. Some parameters will become constants.
86 Parallelization: a region of a function is duplicated resulting in
87 a new function. Variables may be replaced with complex expressions
88 to enable shared variable semantics.
90 All of these will simultaneously look up any callgraph edges. If
91 we're going to inline the duplicated function body, and the given
92 function has some cloned callgraph nodes (one for each place this
93 function will be inlined), those callgraph edges will be duplicated.
94 If we're cloning the body, those callgraph edges will be
95 updated to point into the new body. (Note that the original
96 callgraph node and edge list will not be altered.)
98 See the CALL_EXPR handling case in copy_tree_body_r (). */
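/* Illustrative sketch (not taken from this file): at the source level,
   the inlining transform described above conceptually rewrites

       int callee (int p) { return p + 1; }
       ...
       x = callee (y);

   in the caller into something like

       {
	 int p_copy = y;         <- the PARM_DECL becomes a VAR_DECL
	 retval = p_copy + 1;    <- the RETURN_EXPR becomes a MODIFY_EXPR
       }
       x = retval;

   where p_copy and retval are hypothetical names; the EH regions and
   callgraph edges of the duplicated body are remapped alongside.  */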
100 /* To Do:
102 o In order to make inlining-on-trees work, we pessimized
103 function-local static constants. In particular, they are now
104 always output, even when not addressed. Fix this by treating
105 function-local static constants just like global static
106 constants; the back-end already knows not to output them if they
107 are not needed.
109 o Provide heuristics to clamp inlining of recursive template
110 calls? */
113 /* Weights that estimate_num_insns uses to estimate the size of the
114 produced code. */
116 eni_weights eni_size_weights;
118 /* Weights that estimate_num_insns uses to estimate the time necessary
119 to execute the produced code. */
121 eni_weights eni_time_weights;
123 /* Prototypes. */
125 static tree declare_return_variable (copy_body_data *, tree, tree,
126 basic_block);
127 static void remap_block (tree *, copy_body_data *);
128 static void copy_bind_expr (tree *, int *, copy_body_data *);
129 static void declare_inline_vars (tree, tree);
130 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
131 static void prepend_lexical_block (tree current_block, tree new_block);
132 static tree copy_result_decl_to_var (tree, copy_body_data *);
133 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
134 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
135 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
137 /* Insert a tree->tree mapping for ID. Although the name suggests
138 that the trees should be variables, it is used for more than that. */
140 void
141 insert_decl_map (copy_body_data *id, tree key, tree value)
143 id->decl_map->put (key, value);
145 /* Always insert an identity map as well. If we see this same new
146 node again, we won't want to duplicate it a second time. */
147 if (key != value)
148 id->decl_map->put (value, value);
151 /* Insert a tree->tree mapping for ID. This is only used for
152 variables. */
154 static void
155 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
157 if (!gimple_in_ssa_p (id->src_cfun))
158 return;
160 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
161 return;
163 if (!target_for_debug_bind (key))
164 return;
166 gcc_assert (TREE_CODE (key) == PARM_DECL);
167 gcc_assert (VAR_P (value));
169 if (!id->debug_map)
170 id->debug_map = new hash_map<tree, tree>;
172 id->debug_map->put (key, value);
175 /* If nonzero, we're remapping the contents of inlined debug
176 statements. If negative, an error has occurred, such as a
177 reference to a variable that isn't available in the inlined
178 context. */
179 static int processing_debug_stmt = 0;
181 /* Construct new SSA name for old NAME. ID is the inline context. */
183 static tree
184 remap_ssa_name (tree name, copy_body_data *id)
186 tree new_tree, var;
187 tree *n;
189 gcc_assert (TREE_CODE (name) == SSA_NAME);
191 n = id->decl_map->get (name);
192 if (n)
194 /* When we perform edge redirection as part of CFG copy, IPA-SRA can
195 remove an unused LHS from a call statement. Such an LHS can, however,
196 still appear in debug statements, but its value is lost in this
197 function and we do not want to map it. */
198 if (id->killed_new_ssa_names
199 && id->killed_new_ssa_names->contains (*n))
201 gcc_assert (processing_debug_stmt);
202 processing_debug_stmt = -1;
203 return name;
206 return unshare_expr (*n);
209 if (processing_debug_stmt)
211 if (SSA_NAME_IS_DEFAULT_DEF (name)
212 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
213 && id->entry_bb == NULL
214 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
216 tree vexpr = make_node (DEBUG_EXPR_DECL);
217 gimple *def_temp;
218 gimple_stmt_iterator gsi;
219 tree val = SSA_NAME_VAR (name);
221 n = id->decl_map->get (val);
222 if (n != NULL)
223 val = *n;
224 if (TREE_CODE (val) != PARM_DECL
225 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
227 processing_debug_stmt = -1;
228 return name;
230 n = id->decl_map->get (val);
231 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
232 return *n;
233 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
234 DECL_ARTIFICIAL (vexpr) = 1;
235 TREE_TYPE (vexpr) = TREE_TYPE (name);
236 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
237 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
238 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
239 insert_decl_map (id, val, vexpr);
240 return vexpr;
243 processing_debug_stmt = -1;
244 return name;
247 /* Remap anonymous SSA names or SSA names of anonymous decls. */
248 var = SSA_NAME_VAR (name);
249 if (!var
250 || (!SSA_NAME_IS_DEFAULT_DEF (name)
251 && VAR_P (var)
252 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
253 && DECL_ARTIFICIAL (var)
254 && DECL_IGNORED_P (var)
255 && !DECL_NAME (var)))
257 struct ptr_info_def *pi;
258 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
259 if (!var && SSA_NAME_IDENTIFIER (name))
260 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
261 insert_decl_map (id, name, new_tree);
262 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
263 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
264 /* At least IPA points-to info can be directly transferred. */
265 if (id->src_cfun->gimple_df
266 && id->src_cfun->gimple_df->ipa_pta
267 && POINTER_TYPE_P (TREE_TYPE (name))
268 && (pi = SSA_NAME_PTR_INFO (name))
269 && !pi->pt.anything)
271 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
272 new_pi->pt = pi->pt;
274 /* So can range-info. */
275 if (!POINTER_TYPE_P (TREE_TYPE (name))
276 && SSA_NAME_RANGE_INFO (name))
277 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
278 SSA_NAME_RANGE_INFO (name));
279 return new_tree;
282 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
283 in copy_bb. */
284 new_tree = remap_decl (var, id);
286 /* We might've substituted a constant or another SSA_NAME for
287 the variable.
289 Replace the SSA name representing the RESULT_DECL by a variable during
290 inlining: this saves us from needing to introduce a PHI node in case
291 the return value is only partly initialized. */
292 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
293 && (!SSA_NAME_VAR (name)
294 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
295 || !id->transform_return_to_modify))
297 struct ptr_info_def *pi;
298 new_tree = make_ssa_name (new_tree);
299 insert_decl_map (id, name, new_tree);
300 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
301 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
302 /* At least IPA points-to info can be directly transferred. */
303 if (id->src_cfun->gimple_df
304 && id->src_cfun->gimple_df->ipa_pta
305 && POINTER_TYPE_P (TREE_TYPE (name))
306 && (pi = SSA_NAME_PTR_INFO (name))
307 && !pi->pt.anything)
309 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
310 new_pi->pt = pi->pt;
312 /* So can range-info. */
313 if (!POINTER_TYPE_P (TREE_TYPE (name))
314 && SSA_NAME_RANGE_INFO (name))
315 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
316 SSA_NAME_RANGE_INFO (name));
317 if (SSA_NAME_IS_DEFAULT_DEF (name))
319 /* By inlining a function having an uninitialized variable, we might
320 extend its lifetime (the variable might get reused). This causes an
321 ICE in the case we end up extending the lifetime of an SSA name across
322 an abnormal edge, but it also increases register pressure.
324 We simply initialize all uninitialized vars by 0, except for the
325 case we are inlining to the very first BB. We could avoid this
326 for all BBs that are not inside strongly connected regions of
327 the CFG, but this is expensive to test. */
328 if (id->entry_bb
329 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
330 && (!SSA_NAME_VAR (name)
331 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
332 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
333 0)->dest
334 || EDGE_COUNT (id->entry_bb->preds) != 1))
336 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
337 gimple *init_stmt;
338 tree zero = build_zero_cst (TREE_TYPE (new_tree));
340 init_stmt = gimple_build_assign (new_tree, zero);
341 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
342 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
344 else
346 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
347 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
351 else
352 insert_decl_map (id, name, new_tree);
353 return new_tree;
356 /* Remap DECL during the copying of the BLOCK tree for the function. */
358 tree
359 remap_decl (tree decl, copy_body_data *id)
361 tree *n;
363 /* We only remap local variables in the current function. */
365 /* See if we have remapped this declaration. */
367 n = id->decl_map->get (decl);
369 if (!n && processing_debug_stmt)
371 processing_debug_stmt = -1;
372 return decl;
375 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
376 necessary DECLs have already been remapped and we do not want to duplicate
377 a decl coming from outside of the sequence we are copying. */
378 if (!n
379 && id->prevent_decl_creation_for_types
380 && id->remapping_type_depth > 0
381 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
382 return decl;
384 /* If we didn't already have an equivalent for this declaration, create one
385 now. */
386 if (!n)
388 /* Make a copy of the variable or label. */
389 tree t = id->copy_decl (decl, id);
391 /* Remember it, so that if we encounter this local entity again
392 we can reuse this copy. Do this early because remap_type may
393 need this decl for TYPE_STUB_DECL. */
394 insert_decl_map (id, decl, t);
396 if (!DECL_P (t))
397 return t;
399 /* Remap types, if necessary. */
400 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
401 if (TREE_CODE (t) == TYPE_DECL)
403 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
405 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
406 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
407 is not set on the TYPE_DECL, for example in LTO mode. */
408 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
410 tree x = build_variant_type_copy (TREE_TYPE (t));
411 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
412 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
413 DECL_ORIGINAL_TYPE (t) = x;
417 /* Remap sizes as necessary. */
418 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
419 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
421 /* If fields, do likewise for offset and qualifier. */
422 if (TREE_CODE (t) == FIELD_DECL)
424 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
425 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
426 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
429 return t;
432 if (id->do_not_unshare)
433 return *n;
434 else
435 return unshare_expr (*n);
438 static tree
439 remap_type_1 (tree type, copy_body_data *id)
441 tree new_tree, t;
443 /* We do need a copy. Build and register it now. If this is a pointer or
444 reference type, remap the designated type and make a new pointer or
445 reference type. */
446 if (TREE_CODE (type) == POINTER_TYPE)
448 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
449 TYPE_MODE (type),
450 TYPE_REF_CAN_ALIAS_ALL (type));
451 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
452 new_tree = build_type_attribute_qual_variant (new_tree,
453 TYPE_ATTRIBUTES (type),
454 TYPE_QUALS (type));
455 insert_decl_map (id, type, new_tree);
456 return new_tree;
458 else if (TREE_CODE (type) == REFERENCE_TYPE)
460 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
461 TYPE_MODE (type),
462 TYPE_REF_CAN_ALIAS_ALL (type));
463 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
464 new_tree = build_type_attribute_qual_variant (new_tree,
465 TYPE_ATTRIBUTES (type),
466 TYPE_QUALS (type));
467 insert_decl_map (id, type, new_tree);
468 return new_tree;
470 else
471 new_tree = copy_node (type);
473 insert_decl_map (id, type, new_tree);
475 /* This is a new type, not a copy of an old type. Need to reassociate
476 variants. We can handle everything except the main variant lazily. */
477 t = TYPE_MAIN_VARIANT (type);
478 if (type != t)
480 t = remap_type (t, id);
481 TYPE_MAIN_VARIANT (new_tree) = t;
482 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
483 TYPE_NEXT_VARIANT (t) = new_tree;
485 else
487 TYPE_MAIN_VARIANT (new_tree) = new_tree;
488 TYPE_NEXT_VARIANT (new_tree) = NULL;
491 if (TYPE_STUB_DECL (type))
492 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
494 /* Lazily create pointer and reference types. */
495 TYPE_POINTER_TO (new_tree) = NULL;
496 TYPE_REFERENCE_TO (new_tree) = NULL;
498 /* Copy all types that may contain references to local variables; be sure to
499 preserve sharing in between type and its main variant when possible. */
500 switch (TREE_CODE (new_tree))
502 case INTEGER_TYPE:
503 case REAL_TYPE:
504 case FIXED_POINT_TYPE:
505 case ENUMERAL_TYPE:
506 case BOOLEAN_TYPE:
507 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
509 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
510 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
512 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
513 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
515 else
517 t = TYPE_MIN_VALUE (new_tree);
518 if (t && TREE_CODE (t) != INTEGER_CST)
519 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
521 t = TYPE_MAX_VALUE (new_tree);
522 if (t && TREE_CODE (t) != INTEGER_CST)
523 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
525 return new_tree;
527 case FUNCTION_TYPE:
528 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
529 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
530 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
531 else
532 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
533 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
534 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
535 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
536 else
537 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
538 return new_tree;
540 case ARRAY_TYPE:
541 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
542 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
543 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
544 else
545 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
547 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
549 gcc_checking_assert (TYPE_DOMAIN (type)
550 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
551 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
553 else
555 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
556 /* For array bounds where we have decided not to copy over the bounds
557 variable which isn't used in OpenMP/OpenACC region, change them to
558 an uninitialized VAR_DECL temporary. */
559 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
560 && id->adjust_array_error_bounds
561 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
563 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
564 DECL_ATTRIBUTES (v)
565 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
566 DECL_ATTRIBUTES (v));
567 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
570 break;
572 case RECORD_TYPE:
573 case UNION_TYPE:
574 case QUAL_UNION_TYPE:
575 if (TYPE_MAIN_VARIANT (type) != type
576 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
577 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
578 else
580 tree f, nf = NULL;
582 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
584 t = remap_decl (f, id);
585 DECL_CONTEXT (t) = new_tree;
586 DECL_CHAIN (t) = nf;
587 nf = t;
589 TYPE_FIELDS (new_tree) = nreverse (nf);
591 break;
593 case OFFSET_TYPE:
594 default:
595 /* Shouldn't have been thought variable sized. */
596 gcc_unreachable ();
599 /* All variants of the type share the same size, so use the already remapped data. */
600 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
602 tree s = TYPE_SIZE (type);
603 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
604 tree su = TYPE_SIZE_UNIT (type);
605 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
606 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
607 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
608 || s == mvs);
609 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
610 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
611 || su == mvsu);
612 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
613 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
615 else
617 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
618 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
621 return new_tree;
624 /* Helper function for remap_type_2, called through walk_tree. */
626 static tree
627 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
629 copy_body_data *id = (copy_body_data *) data;
631 if (TYPE_P (*tp))
632 *walk_subtrees = 0;
634 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
635 return *tp;
637 return NULL_TREE;
640 /* Return true if TYPE needs to be remapped because remap_decl on any
641 needed embedded decl returns something other than that decl. */
643 static bool
644 remap_type_2 (tree type, copy_body_data *id)
646 tree t;
648 #define RETURN_TRUE_IF_VAR(T) \
649 do \
651 tree _t = (T); \
652 if (_t) \
654 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
655 return true; \
656 if (!TYPE_SIZES_GIMPLIFIED (type) \
657 && walk_tree (&_t, remap_type_3, id, NULL)) \
658 return true; \
661 while (0)
663 switch (TREE_CODE (type))
665 case POINTER_TYPE:
666 case REFERENCE_TYPE:
667 case FUNCTION_TYPE:
668 case METHOD_TYPE:
669 return remap_type_2 (TREE_TYPE (type), id);
671 case INTEGER_TYPE:
672 case REAL_TYPE:
673 case FIXED_POINT_TYPE:
674 case ENUMERAL_TYPE:
675 case BOOLEAN_TYPE:
676 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
677 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
678 return false;
680 case ARRAY_TYPE:
681 if (remap_type_2 (TREE_TYPE (type), id)
682 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
683 return true;
684 break;
686 case RECORD_TYPE:
687 case UNION_TYPE:
688 case QUAL_UNION_TYPE:
689 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
690 if (TREE_CODE (t) == FIELD_DECL)
692 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
693 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
694 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
695 if (TREE_CODE (type) == QUAL_UNION_TYPE)
696 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
698 break;
700 default:
701 return false;
704 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
705 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
706 return false;
707 #undef RETURN_TRUE_IF_VAR
710 tree
711 remap_type (tree type, copy_body_data *id)
713 tree *node;
714 tree tmp;
716 if (type == NULL)
717 return type;
719 /* See if we have remapped this type. */
720 node = id->decl_map->get (type);
721 if (node)
722 return *node;
724 /* The type only needs remapping if it's variably modified. */
725 if (! variably_modified_type_p (type, id->src_fn)
726 /* Don't remap if copy_decl method doesn't always return a new
727 decl and for all embedded decls returns the passed in decl. */
728 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
730 insert_decl_map (id, type, type);
731 return type;
734 id->remapping_type_depth++;
735 tmp = remap_type_1 (type, id);
736 id->remapping_type_depth--;
738 return tmp;
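/* Illustrative sketch (an assumption for exposition, not code from this
   file): remap_type only builds a new type when the type is variably
   modified in the source function.  For a callee parameter "n" and a
   local of VLA type "int[n]", the ARRAY_TYPE's domain refers to "n", so
   remap_type_1 creates a fresh ARRAY_TYPE whose bounds use the remapped
   copy of "n" in the destination function; a plain type such as "int"
   is simply mapped to itself via insert_decl_map (id, type, type).  */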
741 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
743 static bool
744 can_be_nonlocal (tree decl, copy_body_data *id)
746 /* We cannot duplicate function decls. */
747 if (TREE_CODE (decl) == FUNCTION_DECL)
748 return true;
750 /* Local static vars must be non-local or we get multiple declaration
751 problems. */
752 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
753 return true;
755 return false;
758 static tree
759 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
760 copy_body_data *id)
762 tree old_var;
763 tree new_decls = NULL_TREE;
765 /* Remap its variables. */
766 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
768 tree new_var;
770 if (can_be_nonlocal (old_var, id))
772 /* We need to add this variable to the local decls as otherwise
773 nothing else will do so. */
774 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
775 add_local_decl (cfun, old_var);
776 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
777 && !DECL_IGNORED_P (old_var)
778 && nonlocalized_list)
779 vec_safe_push (*nonlocalized_list, old_var);
780 continue;
783 /* Remap the variable. */
784 new_var = remap_decl (old_var, id);
786 /* If we didn't remap this variable, we can't mess with its
787 TREE_CHAIN. If we remapped this variable to the return slot, it's
788 already declared somewhere else, so don't declare it here. */
790 if (new_var == id->retvar)
792 else if (!new_var)
794 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
795 && !DECL_IGNORED_P (old_var)
796 && nonlocalized_list)
797 vec_safe_push (*nonlocalized_list, old_var);
799 else
801 gcc_assert (DECL_P (new_var));
802 DECL_CHAIN (new_var) = new_decls;
803 new_decls = new_var;
805 /* Also copy value-expressions. */
806 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
808 tree tem = DECL_VALUE_EXPR (new_var);
809 bool old_regimplify = id->regimplify;
810 id->remapping_type_depth++;
811 walk_tree (&tem, copy_tree_body_r, id, NULL);
812 id->remapping_type_depth--;
813 id->regimplify = old_regimplify;
814 SET_DECL_VALUE_EXPR (new_var, tem);
819 return nreverse (new_decls);
822 /* Copy the BLOCK to contain remapped versions of the variables
823 therein. And hook the new block into the block-tree. */
825 static void
826 remap_block (tree *block, copy_body_data *id)
828 tree old_block;
829 tree new_block;
831 /* Make the new block. */
832 old_block = *block;
833 new_block = make_node (BLOCK);
834 TREE_USED (new_block) = TREE_USED (old_block);
835 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
836 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
837 BLOCK_NONLOCALIZED_VARS (new_block)
838 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
839 *block = new_block;
841 /* Remap its variables. */
842 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
843 &BLOCK_NONLOCALIZED_VARS (new_block),
844 id);
846 if (id->transform_lang_insert_block)
847 id->transform_lang_insert_block (new_block);
849 /* Remember the remapped block. */
850 insert_decl_map (id, old_block, new_block);
853 /* Copy the whole block tree and root it in id->block. */
855 static tree
856 remap_blocks (tree block, copy_body_data *id)
858 tree t;
859 tree new_tree = block;
861 if (!block)
862 return NULL;
864 remap_block (&new_tree, id);
865 gcc_assert (new_tree != block);
866 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
867 prepend_lexical_block (new_tree, remap_blocks (t, id));
868 /* Blocks are in arbitrary order, but make things slightly prettier and do
869 not swap order when producing a copy. */
870 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
871 return new_tree;
874 /* Remap the block tree rooted at BLOCK to nothing. */
876 static void
877 remap_blocks_to_null (tree block, copy_body_data *id)
879 tree t;
880 insert_decl_map (id, block, NULL_TREE);
881 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
882 remap_blocks_to_null (t, id);
885 /* Remap the location info pointed to by LOCUS. */
887 static location_t
888 remap_location (location_t locus, copy_body_data *id)
890 if (LOCATION_BLOCK (locus))
892 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
893 gcc_assert (n);
894 if (*n)
895 return set_block (locus, *n);
898 locus = LOCATION_LOCUS (locus);
900 if (locus != UNKNOWN_LOCATION && id->block)
901 return set_block (locus, id->block);
903 return locus;
906 static void
907 copy_statement_list (tree *tp)
909 tree_stmt_iterator oi, ni;
910 tree new_tree;
912 new_tree = alloc_stmt_list ();
913 ni = tsi_start (new_tree);
914 oi = tsi_start (*tp);
915 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
916 *tp = new_tree;
918 for (; !tsi_end_p (oi); tsi_next (&oi))
920 tree stmt = tsi_stmt (oi);
921 if (TREE_CODE (stmt) == STATEMENT_LIST)
922 /* This copy is not redundant; tsi_link_after will smash this
923 STATEMENT_LIST into the end of the one we're building, and we
924 don't want to do that with the original. */
925 copy_statement_list (&stmt);
926 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
930 static void
931 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
933 tree block = BIND_EXPR_BLOCK (*tp);
934 /* Copy (and replace) the statement. */
935 copy_tree_r (tp, walk_subtrees, NULL);
936 if (block)
938 remap_block (&block, id);
939 BIND_EXPR_BLOCK (*tp) = block;
942 if (BIND_EXPR_VARS (*tp))
943 /* This will remap a lot of the same decls again, but this should be
944 harmless. */
945 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
949 /* Create a new gimple_seq by remapping all the statements in BODY
950 using the inlining information in ID. */
952 static gimple_seq
953 remap_gimple_seq (gimple_seq body, copy_body_data *id)
955 gimple_stmt_iterator si;
956 gimple_seq new_body = NULL;
958 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
960 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
961 gimple_seq_add_seq (&new_body, new_stmts);
964 return new_body;
968 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
969 block using the mapping information in ID. */
971 static gimple *
972 copy_gimple_bind (gbind *stmt, copy_body_data *id)
974 gimple *new_bind;
975 tree new_block, new_vars;
976 gimple_seq body, new_body;
978 /* Copy the statement. Note that we purposely don't use copy_stmt
979 here because we need to remap statements as we copy. */
980 body = gimple_bind_body (stmt);
981 new_body = remap_gimple_seq (body, id);
983 new_block = gimple_bind_block (stmt);
984 if (new_block)
985 remap_block (&new_block, id);
987 /* This will remap a lot of the same decls again, but this should be
988 harmless. */
989 new_vars = gimple_bind_vars (stmt);
990 if (new_vars)
991 new_vars = remap_decls (new_vars, NULL, id);
993 new_bind = gimple_build_bind (new_vars, new_body, new_block);
995 return new_bind;
998 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
1000 static bool
1001 is_parm (tree decl)
1003 if (TREE_CODE (decl) == SSA_NAME)
1005 decl = SSA_NAME_VAR (decl);
1006 if (!decl)
1007 return false;
1010 return (TREE_CODE (decl) == PARM_DECL);
1013 /* Remap the dependence CLIQUE from the source to the destination function
1014 as specified in ID. */
1016 static unsigned short
1017 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1019 if (clique == 0 || processing_debug_stmt)
1020 return 0;
1021 if (!id->dependence_map)
1022 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1023 bool existed;
1024 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1025 if (!existed)
1027 /* Clique 1 is reserved for local ones set by PTA. */
1028 if (cfun->last_clique == 0)
1029 cfun->last_clique = 1;
1030 newc = ++cfun->last_clique;
1032 return newc;
1035 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1036 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1037 WALK_SUBTREES is used to tell walk_gimple_op whether to keep
1038 recursing into the child nodes of *TP. */
1040 static tree
1041 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1043 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1044 copy_body_data *id = (copy_body_data *) wi_p->info;
1045 tree fn = id->src_fn;
1047 /* For recursive invocations this is no longer the LHS itself. */
1048 bool is_lhs = wi_p->is_lhs;
1049 wi_p->is_lhs = false;
1051 if (TREE_CODE (*tp) == SSA_NAME)
1053 *tp = remap_ssa_name (*tp, id);
1054 *walk_subtrees = 0;
1055 if (is_lhs)
1056 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1057 return NULL;
1059 else if (auto_var_in_fn_p (*tp, fn))
1061 /* Local variables and labels need to be replaced by equivalent
1062 variables. We don't want to copy static variables; there's
1063 only one of those, no matter how many times we inline the
1064 containing function. Similarly for globals from an outer
1065 function. */
1066 tree new_decl;
1068 /* Remap the declaration. */
1069 new_decl = remap_decl (*tp, id);
1070 gcc_assert (new_decl);
1071 /* Replace this variable with the copy. */
1072 STRIP_TYPE_NOPS (new_decl);
1073 /* ??? The C++ frontend uses void * pointer zero to initialize
1074 any other type. This confuses the middle-end type verification.
1075 As cloned bodies do not go through gimplification again the fixup
1076 there doesn't trigger. */
1077 if (TREE_CODE (new_decl) == INTEGER_CST
1078 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1079 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1080 *tp = new_decl;
1081 *walk_subtrees = 0;
1083 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1084 gcc_unreachable ();
1085 else if (TREE_CODE (*tp) == SAVE_EXPR)
1086 gcc_unreachable ();
1087 else if (TREE_CODE (*tp) == LABEL_DECL
1088 && (!DECL_CONTEXT (*tp)
1089 || decl_function_context (*tp) == id->src_fn))
1090 /* These may need to be remapped for EH handling. */
1091 *tp = remap_decl (*tp, id);
1092 else if (TREE_CODE (*tp) == FIELD_DECL)
1094 /* If the enclosing record type is variably_modified_type_p, the field
1095 has already been remapped. Otherwise, it need not be. */
1096 tree *n = id->decl_map->get (*tp);
1097 if (n)
1098 *tp = *n;
1099 *walk_subtrees = 0;
1101 else if (TYPE_P (*tp))
1102 /* Types may need remapping as well. */
1103 *tp = remap_type (*tp, id);
1104 else if (CONSTANT_CLASS_P (*tp))
1106 /* If this is a constant, we have to copy the node iff the type
1107 will be remapped. copy_tree_r will not copy a constant. */
1108 tree new_type = remap_type (TREE_TYPE (*tp), id);
1110 if (new_type == TREE_TYPE (*tp))
1111 *walk_subtrees = 0;
1113 else if (TREE_CODE (*tp) == INTEGER_CST)
1114 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1115 else
1117 *tp = copy_node (*tp);
1118 TREE_TYPE (*tp) = new_type;
1121 else
1123 /* Otherwise, just copy the node. Note that copy_tree_r already
1124 knows not to copy VAR_DECLs, etc., so this is safe. */
1126 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1128 /* We need to re-canonicalize MEM_REFs from inline substitutions
1129 that can happen when a pointer argument is an ADDR_EXPR.
1130 Recurse here manually to allow that. */
1131 tree ptr = TREE_OPERAND (*tp, 0);
1132 tree type = remap_type (TREE_TYPE (*tp), id);
1133 tree old = *tp;
1134 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1135 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1136 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1137 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1138 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1139 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1141 MR_DEPENDENCE_CLIQUE (*tp)
1142 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1143 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1145 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1146 remapped a parameter as the property might be valid only
1147 for the parameter itself. */
1148 if (TREE_THIS_NOTRAP (old)
1149 && (!is_parm (TREE_OPERAND (old, 0))
1150 || (!id->transform_parameter && is_parm (ptr))))
1151 TREE_THIS_NOTRAP (*tp) = 1;
1152 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1153 *walk_subtrees = 0;
1154 return NULL;
1157 /* Here is the "usual case". Copy this tree node, and then
1158 tweak some special cases. */
1159 copy_tree_r (tp, walk_subtrees, NULL);
1161 if (TREE_CODE (*tp) != OMP_CLAUSE)
1162 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1164 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1166 /* The copied TARGET_EXPR has never been expanded, even if the
1167 original node was expanded already. */
1168 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1169 TREE_OPERAND (*tp, 3) = NULL_TREE;
1171 else if (TREE_CODE (*tp) == ADDR_EXPR)
1173 /* Variable substitution need not be simple. In particular,
1174 the MEM_REF substitution above. Make sure that
1175 TREE_CONSTANT and friends are up-to-date. */
1176 int invariant = is_gimple_min_invariant (*tp);
1177 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1178 recompute_tree_invariant_for_addr_expr (*tp);
1180 /* If this used to be invariant, but is not any longer,
1181 then regimplification is probably needed. */
1182 if (invariant && !is_gimple_min_invariant (*tp))
1183 id->regimplify = true;
1185 *walk_subtrees = 0;
1189 /* Update the TREE_BLOCK for the cloned expr. */
1190 if (EXPR_P (*tp))
1192 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1193 tree old_block = TREE_BLOCK (*tp);
1194 if (old_block)
1196 tree *n;
1197 n = id->decl_map->get (TREE_BLOCK (*tp));
1198 if (n)
1199 new_block = *n;
1201 TREE_SET_BLOCK (*tp, new_block);
1204 /* Keep iterating. */
1205 return NULL_TREE;
1209 /* Called from copy_tree_body via walk_tree. DATA is really a
1210 `copy_body_data *'. */
1212 tree
1213 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1215 copy_body_data *id = (copy_body_data *) data;
1216 tree fn = id->src_fn;
1217 tree new_block;
1219 /* Begin by recognizing trees that we'll completely rewrite for the
1220 inlining context. Our output for these trees is completely
1221 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1222 into an edge). Further down, we'll handle trees that get
1223 duplicated and/or tweaked. */
1225 /* When requested, RETURN_EXPRs should be transformed to just the
1226 contained MODIFY_EXPR. The branch semantics of the return will
1227 be handled elsewhere by manipulating the CFG rather than a statement. */
1228 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1230 tree assignment = TREE_OPERAND (*tp, 0);
1232 /* If we're returning something, just turn that into an
1233 assignment into the equivalent of the original RESULT_DECL.
1234 If the "assignment" is just the result decl, the result
1235 decl has already been set (e.g. a recent "foo (&result_decl,
1236 ...)"); just toss the entire RETURN_EXPR. */
1237 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1239 /* Replace the RETURN_EXPR with (a copy of) the
1240 MODIFY_EXPR hanging underneath. */
1241 *tp = copy_node (assignment);
1243 else /* Else the RETURN_EXPR returns no value. */
1245 *tp = NULL;
1246 return (tree) (void *)1;
1249 else if (TREE_CODE (*tp) == SSA_NAME)
1251 *tp = remap_ssa_name (*tp, id);
1252 *walk_subtrees = 0;
1253 return NULL;
1256 /* Local variables and labels need to be replaced by equivalent
1257 variables. We don't want to copy static variables; there's only
1258 one of those, no matter how many times we inline the containing
1259 function. Similarly for globals from an outer function. */
1260 else if (auto_var_in_fn_p (*tp, fn))
1262 tree new_decl;
1264 /* Remap the declaration. */
1265 new_decl = remap_decl (*tp, id);
1266 gcc_assert (new_decl);
1267 /* Replace this variable with the copy. */
1268 STRIP_TYPE_NOPS (new_decl);
1269 *tp = new_decl;
1270 *walk_subtrees = 0;
1272 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1273 copy_statement_list (tp);
1274 else if (TREE_CODE (*tp) == SAVE_EXPR
1275 || TREE_CODE (*tp) == TARGET_EXPR)
1276 remap_save_expr (tp, id->decl_map, walk_subtrees);
1277 else if (TREE_CODE (*tp) == LABEL_DECL
1278 && (! DECL_CONTEXT (*tp)
1279 || decl_function_context (*tp) == id->src_fn))
1280 /* These may need to be remapped for EH handling. */
1281 *tp = remap_decl (*tp, id);
1282 else if (TREE_CODE (*tp) == BIND_EXPR)
1283 copy_bind_expr (tp, walk_subtrees, id);
1284 /* Types may need remapping as well. */
1285 else if (TYPE_P (*tp))
1286 *tp = remap_type (*tp, id);
1288 /* If this is a constant, we have to copy the node iff the type will be
1289 remapped. copy_tree_r will not copy a constant. */
1290 else if (CONSTANT_CLASS_P (*tp))
1292 tree new_type = remap_type (TREE_TYPE (*tp), id);
1294 if (new_type == TREE_TYPE (*tp))
1295 *walk_subtrees = 0;
1297 else if (TREE_CODE (*tp) == INTEGER_CST)
1298 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1299 else
1301 *tp = copy_node (*tp);
1302 TREE_TYPE (*tp) = new_type;
1306 /* Otherwise, just copy the node. Note that copy_tree_r already
1307 knows not to copy VAR_DECLs, etc., so this is safe. */
1308 else
1310 /* Here we handle trees that are not completely rewritten.
1311 First we detect some inlining-induced bogosities for
1312 discarding. */
1313 if (TREE_CODE (*tp) == MODIFY_EXPR
1314 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1315 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1317 /* Some assignments VAR = VAR; don't generate any rtl code
1318 and thus don't count as variable modification. Avoid
1319 keeping bogosities like 0 = 0. */
1320 tree decl = TREE_OPERAND (*tp, 0), value;
1321 tree *n;
1323 n = id->decl_map->get (decl);
1324 if (n)
1326 value = *n;
1327 STRIP_TYPE_NOPS (value);
1328 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1330 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1331 return copy_tree_body_r (tp, walk_subtrees, data);
1335 else if (TREE_CODE (*tp) == INDIRECT_REF)
1337 /* Get rid of *& from inline substitutions that can happen when a
1338 pointer argument is an ADDR_EXPR. */
1339 tree decl = TREE_OPERAND (*tp, 0);
1340 tree *n = id->decl_map->get (decl);
1341 if (n)
1343 /* If we happen to get an ADDR_EXPR in n->value, strip
1344 it manually here as we'll eventually get ADDR_EXPRs
1345 which lie about their types pointed to. In this case
1346 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1347 but we absolutely rely on that. As fold_indirect_ref
1348 does other useful transformations, try that first, though. */
1349 tree type = TREE_TYPE (*tp);
1350 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1351 tree old = *tp;
1352 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1353 if (! *tp)
1355 type = remap_type (type, id);
1356 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1359 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1360 /* ??? We should either assert here or build
1361 a VIEW_CONVERT_EXPR instead of blindly leaking
1362 incompatible types to our IL. */
1363 if (! *tp)
1364 *tp = TREE_OPERAND (ptr, 0);
1366 else
1368 *tp = build1 (INDIRECT_REF, type, ptr);
1369 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1370 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1371 TREE_READONLY (*tp) = TREE_READONLY (old);
1372 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1373 have remapped a parameter as the property might be
1374 valid only for the parameter itself. */
1375 if (TREE_THIS_NOTRAP (old)
1376 && (!is_parm (TREE_OPERAND (old, 0))
1377 || (!id->transform_parameter && is_parm (ptr))))
1378 TREE_THIS_NOTRAP (*tp) = 1;
1381 *walk_subtrees = 0;
1382 return NULL;
1385 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1387 /* We need to re-canonicalize MEM_REFs from inline substitutions
1388 that can happen when a pointer argument is an ADDR_EXPR.
1389 Recurse here manually to allow that. */
1390 tree ptr = TREE_OPERAND (*tp, 0);
1391 tree type = remap_type (TREE_TYPE (*tp), id);
1392 tree old = *tp;
1393 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1394 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1395 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1396 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1397 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1398 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1400 MR_DEPENDENCE_CLIQUE (*tp)
1401 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1402 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1404 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1405 remapped a parameter as the property might be valid only
1406 for the parameter itself. */
1407 if (TREE_THIS_NOTRAP (old)
1408 && (!is_parm (TREE_OPERAND (old, 0))
1409 || (!id->transform_parameter && is_parm (ptr))))
1410 TREE_THIS_NOTRAP (*tp) = 1;
1411 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1412 *walk_subtrees = 0;
1413 return NULL;
1416 /* Here is the "usual case". Copy this tree node, and then
1417 tweak some special cases. */
1418 copy_tree_r (tp, walk_subtrees, NULL);
1420 /* If EXPR has a block defined, map it to the newly constructed block.
1421 When inlining we want EXPRs without a block to appear in the block
1422 of the function call if we are not remapping a type. */
1423 if (EXPR_P (*tp))
1425 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1426 if (TREE_BLOCK (*tp))
1428 tree *n;
1429 n = id->decl_map->get (TREE_BLOCK (*tp));
1430 if (n)
1431 new_block = *n;
1433 TREE_SET_BLOCK (*tp, new_block);
1436 if (TREE_CODE (*tp) != OMP_CLAUSE)
1437 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1439 /* The copied TARGET_EXPR has never been expanded, even if the
1440 original node was expanded already. */
1441 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1443 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1444 TREE_OPERAND (*tp, 3) = NULL_TREE;
1447 /* Variable substitution need not be simple. In particular, the
1448 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1449 and friends are up-to-date. */
1450 else if (TREE_CODE (*tp) == ADDR_EXPR)
1452 int invariant = is_gimple_min_invariant (*tp);
1453 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1455 /* Handle the case where we substituted an INDIRECT_REF
1456 into the operand of the ADDR_EXPR. */
1457 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1458 && !id->do_not_fold)
1460 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1461 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1462 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1463 *tp = t;
1465 else
1466 recompute_tree_invariant_for_addr_expr (*tp);
1468 /* If this used to be invariant, but is not any longer,
1469 then regimplification is probably needed. */
1470 if (invariant && !is_gimple_min_invariant (*tp))
1471 id->regimplify = true;
1473 *walk_subtrees = 0;
1477 /* Keep iterating. */
1478 return NULL_TREE;
1481 /* Helper for remap_gimple_stmt. Given an EH region number for the
1482 source function, map that to the duplicate EH region number in
1483 the destination function. */
1485 static int
1486 remap_eh_region_nr (int old_nr, copy_body_data *id)
1488 eh_region old_r, new_r;
1490 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1491 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1493 return new_r->index;
1496 /* Similar, but operate on INTEGER_CSTs. */
1498 static tree
1499 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1501 int old_nr, new_nr;
1503 old_nr = tree_to_shwi (old_t_nr);
1504 new_nr = remap_eh_region_nr (old_nr, id);
1506 return build_int_cst (integer_type_node, new_nr);
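/* Illustrative sketch (hypothetical region numbers, not from this file):
   if the EH region numbered 2 in the source function was duplicated as
   region 5 in the destination, then a copied call such as

       __builtin_eh_pointer (2)

   has its constant argument rewritten by remap_eh_region_tree_nr into

       __builtin_eh_pointer (5)

   in the same way that RESX and EH_DISPATCH statements get their region
   numbers remapped via remap_eh_region_nr below.  */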
1509 /* Helper for copy_bb. Remap statement STMT using the inlining
1510 information in ID. Return the new statement copy. */
1512 static gimple_seq
1513 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1515 gimple *copy = NULL;
1516 struct walk_stmt_info wi;
1517 bool skip_first = false;
1518 gimple_seq stmts = NULL;
1520 if (is_gimple_debug (stmt)
1521 && (gimple_debug_nonbind_marker_p (stmt)
1522 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1523 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1524 return NULL;
1526 /* Begin by recognizing trees that we'll completely rewrite for the
1527 inlining context. Our output for these trees is completely
1528 different from our input (e.g. RETURN_EXPR is deleted and morphs
1529 into an edge). Further down, we'll handle trees that get
1530 duplicated and/or tweaked. */
1532 /* When requested, GIMPLE_RETURN should be transformed to just the
1533 contained GIMPLE_ASSIGN. The branch semantics of the return will
1534 be handled elsewhere by manipulating the CFG rather than the
1535 statement. */
1536 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1538 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1540 /* If we're returning something, just turn that into an
1541 assignment to the equivalent of the original RESULT_DECL.
1542 If RETVAL is just the result decl, the result decl has
1543 already been set (e.g. a recent "foo (&result_decl, ...)");
1544 just toss the entire GIMPLE_RETURN. Likewise for when the
1545 call doesn't want the return value. */
1546 if (retval
1547 && (TREE_CODE (retval) != RESULT_DECL
1548 && (!id->call_stmt
1549 || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1550 && (TREE_CODE (retval) != SSA_NAME
1551 || ! SSA_NAME_VAR (retval)
1552 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1554 copy = gimple_build_assign (id->do_not_unshare
1555 ? id->retvar : unshare_expr (id->retvar),
1556 retval);
1557 /* id->retvar is already substituted. Skip it on later remapping. */
1558 skip_first = true;
1560 else
1561 return NULL;
1563 else if (gimple_has_substatements (stmt))
1565 gimple_seq s1, s2;
1567 /* When cloning bodies from the C++ front end, we will be handed bodies
1568 in High GIMPLE form. Handle here all the High GIMPLE statements that
1569 have embedded statements. */
1570 switch (gimple_code (stmt))
1572 case GIMPLE_BIND:
1573 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1574 break;
1576 case GIMPLE_CATCH:
1578 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1579 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1580 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1582 break;
1584 case GIMPLE_EH_FILTER:
1585 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1586 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1587 break;
1589 case GIMPLE_TRY:
1590 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1591 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1592 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1593 break;
1595 case GIMPLE_WITH_CLEANUP_EXPR:
1596 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1597 copy = gimple_build_wce (s1);
1598 break;
1600 case GIMPLE_OMP_PARALLEL:
1602 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1603 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1604 copy = gimple_build_omp_parallel
1605 (s1,
1606 gimple_omp_parallel_clauses (omp_par_stmt),
1607 gimple_omp_parallel_child_fn (omp_par_stmt),
1608 gimple_omp_parallel_data_arg (omp_par_stmt));
1610 break;
1612 case GIMPLE_OMP_TASK:
1613 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1614 copy = gimple_build_omp_task
1615 (s1,
1616 gimple_omp_task_clauses (stmt),
1617 gimple_omp_task_child_fn (stmt),
1618 gimple_omp_task_data_arg (stmt),
1619 gimple_omp_task_copy_fn (stmt),
1620 gimple_omp_task_arg_size (stmt),
1621 gimple_omp_task_arg_align (stmt));
1622 break;
1624 case GIMPLE_OMP_FOR:
1625 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1626 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1627 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1628 gimple_omp_for_clauses (stmt),
1629 gimple_omp_for_collapse (stmt), s2);
1631 size_t i;
1632 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1634 gimple_omp_for_set_index (copy, i,
1635 gimple_omp_for_index (stmt, i));
1636 gimple_omp_for_set_initial (copy, i,
1637 gimple_omp_for_initial (stmt, i));
1638 gimple_omp_for_set_final (copy, i,
1639 gimple_omp_for_final (stmt, i));
1640 gimple_omp_for_set_incr (copy, i,
1641 gimple_omp_for_incr (stmt, i));
1642 gimple_omp_for_set_cond (copy, i,
1643 gimple_omp_for_cond (stmt, i));
1646 break;
1648 case GIMPLE_OMP_MASTER:
1649 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1650 copy = gimple_build_omp_master (s1);
1651 break;
1653 case GIMPLE_OMP_TASKGROUP:
1654 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1655 copy = gimple_build_omp_taskgroup
1656 (s1, gimple_omp_taskgroup_clauses (stmt));
1657 break;
1659 case GIMPLE_OMP_ORDERED:
1660 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1661 copy = gimple_build_omp_ordered
1662 (s1,
1663 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1664 break;
1666 case GIMPLE_OMP_SCAN:
1667 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1668 copy = gimple_build_omp_scan
1669 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1670 break;
1672 case GIMPLE_OMP_SECTION:
1673 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1674 copy = gimple_build_omp_section (s1);
1675 break;
1677 case GIMPLE_OMP_SECTIONS:
1678 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1679 copy = gimple_build_omp_sections
1680 (s1, gimple_omp_sections_clauses (stmt));
1681 break;
1683 case GIMPLE_OMP_SINGLE:
1684 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1685 copy = gimple_build_omp_single
1686 (s1, gimple_omp_single_clauses (stmt));
1687 break;
1689 case GIMPLE_OMP_TARGET:
1690 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1691 copy = gimple_build_omp_target
1692 (s1, gimple_omp_target_kind (stmt),
1693 gimple_omp_target_clauses (stmt));
1694 break;
1696 case GIMPLE_OMP_TEAMS:
1697 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1698 copy = gimple_build_omp_teams
1699 (s1, gimple_omp_teams_clauses (stmt));
1700 break;
1702 case GIMPLE_OMP_CRITICAL:
1703 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1704 copy = gimple_build_omp_critical (s1,
1705 gimple_omp_critical_name
1706 (as_a <gomp_critical *> (stmt)),
1707 gimple_omp_critical_clauses
1708 (as_a <gomp_critical *> (stmt)));
1709 break;
1711 case GIMPLE_TRANSACTION:
1713 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1714 gtransaction *new_trans_stmt;
1715 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1716 id);
1717 copy = new_trans_stmt = gimple_build_transaction (s1);
1718 gimple_transaction_set_subcode (new_trans_stmt,
1719 gimple_transaction_subcode (old_trans_stmt));
1720 gimple_transaction_set_label_norm (new_trans_stmt,
1721 gimple_transaction_label_norm (old_trans_stmt));
1722 gimple_transaction_set_label_uninst (new_trans_stmt,
1723 gimple_transaction_label_uninst (old_trans_stmt));
1724 gimple_transaction_set_label_over (new_trans_stmt,
1725 gimple_transaction_label_over (old_trans_stmt));
1727 break;
1729 default:
1730 gcc_unreachable ();
1733 else
1735 if (gimple_assign_copy_p (stmt)
1736 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1737 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1739 /* Here we handle statements that are not completely rewritten.
1740 First we detect some inlining-induced bogosities for
1741 discarding. */
1743 /* Some assignments VAR = VAR; don't generate any rtl code
1744 and thus don't count as variable modification. Avoid
1745 keeping bogosities like 0 = 0. */
1746 tree decl = gimple_assign_lhs (stmt), value;
1747 tree *n;
1749 n = id->decl_map->get (decl);
1750 if (n)
1752 value = *n;
1753 STRIP_TYPE_NOPS (value);
1754 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1755 return NULL;
1759 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1760 in a block that we aren't copying during tree_function_versioning,
1761 just drop the clobber stmt. */
1762 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1764 tree lhs = gimple_assign_lhs (stmt);
1765 if (TREE_CODE (lhs) == MEM_REF
1766 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1768 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1769 if (gimple_bb (def_stmt)
1770 && !bitmap_bit_p (id->blocks_to_copy,
1771 gimple_bb (def_stmt)->index))
1772 return NULL;
1776 /* We do not allow CLOBBERs of handled components. In case the
1777 returned value is stored via such a handled component, remove
1778 the clobber so the stmt verifier is happy. */
1779 if (gimple_clobber_p (stmt)
1780 && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1782 tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1783 if (!DECL_P (remapped)
1784 && TREE_CODE (remapped) != MEM_REF)
1785 return NULL;
1788 if (gimple_debug_bind_p (stmt))
1790 gdebug *copy
1791 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1792 gimple_debug_bind_get_value (stmt),
1793 stmt);
1794 if (id->reset_location)
1795 gimple_set_location (copy, input_location);
1796 id->debug_stmts.safe_push (copy);
1797 gimple_seq_add_stmt (&stmts, copy);
1798 return stmts;
1800 if (gimple_debug_source_bind_p (stmt))
1802 gdebug *copy = gimple_build_debug_source_bind
1803 (gimple_debug_source_bind_get_var (stmt),
1804 gimple_debug_source_bind_get_value (stmt),
1805 stmt);
1806 if (id->reset_location)
1807 gimple_set_location (copy, input_location);
1808 id->debug_stmts.safe_push (copy);
1809 gimple_seq_add_stmt (&stmts, copy);
1810 return stmts;
1812 if (gimple_debug_nonbind_marker_p (stmt))
1814 /* If the inlined function has too many debug markers,
1815 don't copy them. */
1816 if (id->src_cfun->debug_marker_count
1817 > param_max_debug_marker_count)
1818 return stmts;
1820 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1821 if (id->reset_location)
1822 gimple_set_location (copy, input_location);
1823 id->debug_stmts.safe_push (copy);
1824 gimple_seq_add_stmt (&stmts, copy);
1825 return stmts;
1828 /* Create a new deep copy of the statement. */
1829 copy = gimple_copy (stmt);
1831 /* Clear flags that need revisiting. */
1832 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1834 if (gimple_call_tail_p (call_stmt))
1835 gimple_call_set_tail (call_stmt, false);
1836 if (gimple_call_from_thunk_p (call_stmt))
1837 gimple_call_set_from_thunk (call_stmt, false);
1838 if (gimple_call_internal_p (call_stmt))
1839 switch (gimple_call_internal_fn (call_stmt))
1841 case IFN_GOMP_SIMD_LANE:
1842 case IFN_GOMP_SIMD_VF:
1843 case IFN_GOMP_SIMD_LAST_LANE:
1844 case IFN_GOMP_SIMD_ORDERED_START:
1845 case IFN_GOMP_SIMD_ORDERED_END:
1846 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1847 break;
1848 default:
1849 break;
1853 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1854 RESX and EH_DISPATCH. */
1855 if (id->eh_map)
1856 switch (gimple_code (copy))
1858 case GIMPLE_CALL:
1860 tree r, fndecl = gimple_call_fndecl (copy);
1861 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1862 switch (DECL_FUNCTION_CODE (fndecl))
1864 case BUILT_IN_EH_COPY_VALUES:
1865 r = gimple_call_arg (copy, 1);
1866 r = remap_eh_region_tree_nr (r, id);
1867 gimple_call_set_arg (copy, 1, r);
1868 /* FALLTHRU */
1870 case BUILT_IN_EH_POINTER:
1871 case BUILT_IN_EH_FILTER:
1872 r = gimple_call_arg (copy, 0);
1873 r = remap_eh_region_tree_nr (r, id);
1874 gimple_call_set_arg (copy, 0, r);
1875 break;
1877 default:
1878 break;
1881 /* Reset alias info if we didn't apply measures to
1882 keep it valid over inlining by setting DECL_PT_UID. */
1883 if (!id->src_cfun->gimple_df
1884 || !id->src_cfun->gimple_df->ipa_pta)
1885 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1887 break;
1889 case GIMPLE_RESX:
1891 gresx *resx_stmt = as_a <gresx *> (copy);
1892 int r = gimple_resx_region (resx_stmt);
1893 r = remap_eh_region_nr (r, id);
1894 gimple_resx_set_region (resx_stmt, r);
1896 break;
1898 case GIMPLE_EH_DISPATCH:
1900 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1901 int r = gimple_eh_dispatch_region (eh_dispatch);
1902 r = remap_eh_region_nr (r, id);
1903 gimple_eh_dispatch_set_region (eh_dispatch, r);
1905 break;
1907 default:
1908 break;
1912 /* If STMT has a block defined, map it to the newly constructed block. */
1913 if (tree block = gimple_block (copy))
1915 tree *n;
1916 n = id->decl_map->get (block);
1917 gcc_assert (n);
1918 gimple_set_block (copy, *n);
1920 if (id->param_body_adjs)
1922 gimple_seq extra_stmts = NULL;
1923 id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts);
1924 if (!gimple_seq_empty_p (extra_stmts))
1926 memset (&wi, 0, sizeof (wi));
1927 wi.info = id;
1928 for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1929 !gsi_end_p (egsi);
1930 gsi_next (&egsi))
1931 walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1932 gimple_seq_add_seq (&stmts, extra_stmts);
1936 if (id->reset_location)
1937 gimple_set_location (copy, input_location);
1939 /* Debug statements ought to be rebuilt and not copied. */
1940 gcc_checking_assert (!is_gimple_debug (copy));
1942 /* Remap all the operands in COPY. */
1943 memset (&wi, 0, sizeof (wi));
1944 wi.info = id;
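/* When SKIP_FIRST is set, operand 0 of COPY was already set up when the
   statement was rewritten above (e.g. a return rewritten into an assignment
   to the return variable), so only the RHS operand is remapped here. */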
1945 if (skip_first)
1946 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1947 else
1948 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1950 /* Clear the copied virtual operands. We are not remapping them here
1951 but are going to recreate them from scratch. */
1952 if (gimple_has_mem_ops (copy))
1954 gimple_set_vdef (copy, NULL_TREE);
1955 gimple_set_vuse (copy, NULL_TREE);
1958 gimple_seq_add_stmt (&stmts, copy);
1959 return stmts;
1963 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1964 later. */
1966 static basic_block
1967 copy_bb (copy_body_data *id, basic_block bb,
1968 profile_count num, profile_count den)
1970 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1971 basic_block copy_basic_block;
1972 tree decl;
1973 basic_block prev;
1975 profile_count::adjust_for_ipa_scaling (&num, &den);
1977 /* Search for previous copied basic block. */
1978 prev = bb->prev_bb;
1979 while (!prev->aux)
1980 prev = prev->prev_bb;
1982 /* create_basic_block() will append every new block to
1983 basic_block_info automatically. */
1984 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1985 copy_basic_block->count = bb->count.apply_scale (num, den);
1987 copy_gsi = gsi_start_bb (copy_basic_block);
1989 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1991 gimple_seq stmts;
1992 gimple *stmt = gsi_stmt (gsi);
1993 gimple *orig_stmt = stmt;
1994 gimple_stmt_iterator stmts_gsi;
1995 bool stmt_added = false;
1997 id->regimplify = false;
1998 stmts = remap_gimple_stmt (stmt, id);
2000 if (gimple_seq_empty_p (stmts))
2001 continue;
2003 seq_gsi = copy_gsi;
2005 for (stmts_gsi = gsi_start (stmts);
2006 !gsi_end_p (stmts_gsi); )
2008 stmt = gsi_stmt (stmts_gsi);
2010 /* Advance iterator now before stmt is moved to seq_gsi. */
2011 gsi_next (&stmts_gsi);
2013 if (gimple_nop_p (stmt))
2014 continue;
2016 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2017 orig_stmt);
2019 /* With return slot optimization we can end up with
2020 non-gimple (foo *)&this->m, fix that here. */
2021 if (is_gimple_assign (stmt)
2022 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
2023 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2025 tree new_rhs;
2026 new_rhs = force_gimple_operand_gsi (&seq_gsi,
2027 gimple_assign_rhs1 (stmt),
2028 true, NULL, false,
2029 GSI_CONTINUE_LINKING);
2030 gimple_assign_set_rhs1 (stmt, new_rhs);
2031 id->regimplify = false;
2034 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2036 if (id->regimplify)
2037 gimple_regimplify_operands (stmt, &seq_gsi);
2039 stmt_added = true;
2042 if (!stmt_added)
2043 continue;
2045 /* If copy_basic_block was empty at the start of this iteration,
2046 call gsi_start_bb again to get at the newly added statements. */
2047 if (gsi_end_p (copy_gsi))
2048 copy_gsi = gsi_start_bb (copy_basic_block);
2049 else
2050 gsi_next (&copy_gsi);
2052 /* Process the new statement. The call to gimple_regimplify_operands
2053 possibly turned the statement into multiple statements; we
2054 need to process all of them. */
2057 tree fn;
2058 gcall *call_stmt;
2060 stmt = gsi_stmt (copy_gsi);
2061 call_stmt = dyn_cast <gcall *> (stmt);
2062 if (call_stmt
2063 && gimple_call_va_arg_pack_p (call_stmt)
2064 && id->call_stmt
2065 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2067 /* __builtin_va_arg_pack () should be replaced by
2068 all arguments corresponding to ... in the caller. */
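/* Illustrative example: if the callee is declared  int f (int a, ...)  and
   its body contains  bar (x, __builtin_va_arg_pack ()),  then inlining the
   call  f (a, b, c)  turns that statement into  bar (x, b, c). */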
2069 tree p;
2070 gcall *new_call;
2071 vec<tree> argarray;
2072 size_t nargs = gimple_call_num_args (id->call_stmt);
2073 size_t n;
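/* NARGS starts as the number of arguments in the caller's call and is
   reduced by one for each named parameter of the callee, leaving the
   count of arguments that were passed through '...'. */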
2075 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2076 nargs--;
2078 /* Create the new array of arguments. */
2079 n = nargs + gimple_call_num_args (call_stmt);
2080 argarray.create (n);
2081 argarray.safe_grow_cleared (n);
2083 /* Copy all the arguments before '...' */
2084 memcpy (argarray.address (),
2085 gimple_call_arg_ptr (call_stmt, 0),
2086 gimple_call_num_args (call_stmt) * sizeof (tree));
2088 /* Append the arguments passed in '...' */
2089 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2090 gimple_call_arg_ptr (id->call_stmt, 0)
2091 + (gimple_call_num_args (id->call_stmt) - nargs),
2092 nargs * sizeof (tree));
2094 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2095 argarray);
2097 argarray.release ();
2099 /* Copy all GIMPLE_CALL flags, location and block, except
2100 GF_CALL_VA_ARG_PACK. */
2101 gimple_call_copy_flags (new_call, call_stmt);
2102 gimple_call_set_va_arg_pack (new_call, false);
2103 /* location includes block. */
2104 gimple_set_location (new_call, gimple_location (stmt));
2105 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2107 gsi_replace (&copy_gsi, new_call, false);
2108 stmt = new_call;
2110 else if (call_stmt
2111 && id->call_stmt
2112 && (decl = gimple_call_fndecl (stmt))
2113 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2115 /* __builtin_va_arg_pack_len () should be replaced by
2116 the number of anonymous arguments. */
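/* Illustrative example: inlining  f (a, b, c)  into a callee declared as
   int f (int a, ...)  replaces __builtin_va_arg_pack_len () with 2. */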
2117 size_t nargs = gimple_call_num_args (id->call_stmt);
2118 tree count, p;
2119 gimple *new_stmt;
2121 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2122 nargs--;
2124 if (!gimple_call_lhs (stmt))
2126 /* Drop unused calls. */
2127 gsi_remove (&copy_gsi, false);
2128 continue;
2130 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2132 count = build_int_cst (integer_type_node, nargs);
2133 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2134 gsi_replace (&copy_gsi, new_stmt, false);
2135 stmt = new_stmt;
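/* Otherwise the caller forwards its own __builtin_va_arg_pack (), so the
   final length is not known here; keep the call and add the NARGS
   arguments passed explicitly at this call site to its result. */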
2137 else if (nargs != 0)
2139 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2140 count = build_int_cst (integer_type_node, nargs);
2141 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2142 PLUS_EXPR, newlhs, count);
2143 gimple_call_set_lhs (stmt, newlhs);
2144 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2147 else if (call_stmt
2148 && id->call_stmt
2149 && gimple_call_internal_p (stmt)
2150 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2152 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2153 gsi_remove (&copy_gsi, false);
2154 continue;
2157 /* Statements produced by inlining can be unfolded, especially
2158 when we constant propagated some operands. We can't fold
2159 them right now for two reasons:
2160 1) folding requires SSA_NAME_DEF_STMTs to be correct
2161 2) we can't change function calls to builtins.
2162 So we just mark the statement for later folding. We mark
2163 all new statements, instead of just the statements that have
2164 changed by some nontrivial substitution, so that even statements
2165 made foldable indirectly are updated. If this turns out to be
2166 expensive, copy_body can be told to watch for nontrivial
2167 changes. */
2168 if (id->statements_to_fold)
2169 id->statements_to_fold->add (stmt);
2171 /* We're duplicating a CALL_EXPR. Find any corresponding
2172 callgraph edges and update or duplicate them. */
2173 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2175 struct cgraph_edge *edge;
2177 switch (id->transform_call_graph_edges)
2179 case CB_CGE_DUPLICATE:
2180 edge = id->src_node->get_edge (orig_stmt);
2181 if (edge)
2183 struct cgraph_edge *old_edge = edge;
2184 profile_count old_cnt = edge->count;
2185 edge = edge->clone (id->dst_node, call_stmt,
2186 gimple_uid (stmt),
2187 num, den,
2188 true);
2190 /* A speculative call consists of multiple edges - an indirect edge
2191 and one or more direct edges (one indirect edge may have
2192 multiple direct edges). Duplicate the whole thing and
2193 distribute frequencies accordingly. */
2194 if (edge->speculative)
2196 struct cgraph_edge *direct, *indirect;
2197 struct ipa_ref *ref;
2199 gcc_assert (!edge->indirect_unknown_callee);
2200 old_edge->speculative_call_info (direct, indirect, ref);
2201 while (old_edge->next_callee
2202 && old_edge->next_callee->speculative
2203 && indirect->num_speculative_call_targets_p ()
2204 > 1)
2206 id->dst_node->clone_reference (ref, stmt);
2208 edge = old_edge->next_callee;
2209 edge = edge->clone (id->dst_node, call_stmt,
2210 gimple_uid (stmt), num, den,
2211 true);
2212 old_edge = old_edge->next_callee;
2213 gcc_assert (!edge->indirect_unknown_callee);
2215 /* If the indirect edge has multiple speculative
2216 calls, iterate through all direct calls
2217 associated with the speculative call and clone
2218 all related direct edges before cloning the
2219 related indirect edge. */
2220 old_edge->speculative_call_info (direct, indirect,
2221 ref);
2224 profile_count indir_cnt = indirect->count;
2226 /* Duplicate the indirect edge after all direct edges
2227 have been cloned. */
2228 indirect = indirect->clone (id->dst_node, call_stmt,
2229 gimple_uid (stmt),
2230 num, den,
2231 true);
2233 profile_probability prob
2234 = indir_cnt.probability_in (old_cnt + indir_cnt);
2235 indirect->count
2236 = copy_basic_block->count.apply_probability (prob);
2237 edge->count = copy_basic_block->count - indirect->count;
2238 id->dst_node->clone_reference (ref, stmt);
2240 else
2241 edge->count = copy_basic_block->count;
2243 break;
2245 case CB_CGE_MOVE_CLONES:
2246 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2247 call_stmt);
2248 edge = id->dst_node->get_edge (stmt);
2249 break;
2251 case CB_CGE_MOVE:
2252 edge = id->dst_node->get_edge (orig_stmt);
2253 if (edge)
2254 edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2255 break;
2257 default:
2258 gcc_unreachable ();
2261 /* Constant propagation on arguments done during inlining
2262 may create a new direct call. Produce an edge for it. */
2263 if ((!edge
2264 || (edge->indirect_inlining_edge
2265 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2266 && id->dst_node->definition
2267 && (fn = gimple_call_fndecl (stmt)) != NULL)
2269 struct cgraph_node *dest = cgraph_node::get_create (fn);
2271 /* We have a missing edge in the callgraph. This can happen
2272 when previous inlining turned an indirect call into a
2273 direct call by constant propagating arguments, or when we are
2274 producing a dead clone (for further cloning). In all
2275 other cases we hit a bug (incorrect node sharing is the
2276 most common reason for missing edges). */
2277 gcc_assert (!dest->definition
2278 || dest->address_taken
2279 || !id->src_node->definition
2280 || !id->dst_node->definition);
2281 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2282 id->dst_node->create_edge_including_clones
2283 (dest, orig_stmt, call_stmt, bb->count,
2284 CIF_ORIGINALLY_INDIRECT_CALL);
2285 else
2286 id->dst_node->create_edge (dest, call_stmt,
2287 bb->count)->inline_failed
2288 = CIF_ORIGINALLY_INDIRECT_CALL;
2289 if (dump_file)
2291 fprintf (dump_file, "Created new direct edge to %s\n",
2292 dest->dump_name ());
2296 notice_special_calls (as_a <gcall *> (stmt));
2299 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2300 id->eh_map, id->eh_lp_nr);
2302 gsi_next (&copy_gsi);
2304 while (!gsi_end_p (copy_gsi));
2306 copy_gsi = gsi_last_bb (copy_basic_block);
2309 return copy_basic_block;
2312 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2313 form is quite easy, since the dominator relationship for the old basic blocks
2314 does not change.
2316 There is, however, an exception: inlining might change the dominator relation
2317 across EH edges from basic blocks within the inlined function
2318 to landing pads in the function we inline into.
2320 The function fills in the PHI_RESULTs of such PHI nodes if they refer
2321 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2322 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2323 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2324 set, which means that there will be no overlapping live ranges
2325 for the underlying symbol.
2327 This might change in the future if we allow redirecting of EH edges;
2328 we might then want to change the way the CFG is built pre-inlining to include
2329 all the possible edges. */
2330 static void
2331 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2332 bool can_throw, bool nonlocal_goto)
2334 edge e;
2335 edge_iterator ei;
2337 FOR_EACH_EDGE (e, ei, bb->succs)
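/* AUX is only set on blocks being copied, so this selects successor edges
   whose destination stays in the original function (or maps to the entry
   block), i.e. the EH/abnormal edges leaving the copied region. */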
2338 if (!e->dest->aux
2339 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2341 gphi *phi;
2342 gphi_iterator si;
2344 if (!nonlocal_goto)
2345 gcc_assert (e->flags & EDGE_EH);
2347 if (!can_throw)
2348 gcc_assert (!(e->flags & EDGE_EH));
2350 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2352 edge re;
2354 phi = si.phi ();
2356 /* For abnormal goto/call edges the receiver can be the
2357 ENTRY_BLOCK. Do not assert this cannot happen. */
2359 gcc_assert ((e->flags & EDGE_EH)
2360 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2362 re = find_edge (ret_bb, e->dest);
2363 gcc_checking_assert (re);
2364 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2365 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2367 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2368 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2373 /* Insert clobbers for automatic variables of inlined ID->src_fn
2374 function at the start of basic block ID->eh_landing_pad_dest. */
2376 static void
2377 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2379 tree var;
2380 basic_block bb = id->eh_landing_pad_dest;
2381 live_vars_map *vars = NULL;
2382 unsigned int cnt = 0;
2383 unsigned int i;
2384 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2385 if (VAR_P (var)
2386 && !DECL_HARD_REGISTER (var)
2387 && !TREE_THIS_VOLATILE (var)
2388 && !DECL_HAS_VALUE_EXPR_P (var)
2389 && !is_gimple_reg (var)
2390 && auto_var_in_fn_p (var, id->src_fn)
2391 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2393 tree *t = id->decl_map->get (var);
2394 if (!t)
2395 continue;
2396 tree new_var = *t;
2397 if (VAR_P (new_var)
2398 && !DECL_HARD_REGISTER (new_var)
2399 && !TREE_THIS_VOLATILE (new_var)
2400 && !DECL_HAS_VALUE_EXPR_P (new_var)
2401 && !is_gimple_reg (new_var)
2402 && auto_var_in_fn_p (new_var, id->dst_fn))
2404 if (vars == NULL)
2405 vars = new live_vars_map;
2406 vars->put (DECL_UID (var), cnt++);
2409 if (vars == NULL)
2410 return;
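/* Compute liveness of the candidate variables in the source function and
   insert a clobber at the landing pad for each variable that is live in
   some copied block with an EH edge into the pad. */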
2412 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2413 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2414 if (VAR_P (var))
2416 edge e;
2417 edge_iterator ei;
2418 bool needed = false;
2419 unsigned int *v = vars->get (DECL_UID (var));
2420 if (v == NULL)
2421 continue;
2422 FOR_EACH_EDGE (e, ei, bb->preds)
2423 if ((e->flags & EDGE_EH) != 0
2424 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2426 basic_block src_bb = (basic_block) e->src->aux;
2428 if (bitmap_bit_p (&live[src_bb->index], *v))
2430 needed = true;
2431 break;
2434 if (needed)
2436 tree new_var = *id->decl_map->get (var);
2437 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2438 tree clobber = build_clobber (TREE_TYPE (new_var));
2439 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2440 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2443 destroy_live_vars (live);
2444 delete vars;
2447 /* Copy edges from BB into its copy constructed earlier, scale profile
2448 accordingly. Edges will be taken care of later. Assume aux
2449 pointers point to the copies of each BB. Return true if any
2450 debug stmts are left after a statement that must end the basic block. */
2452 static bool
2453 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2454 basic_block ret_bb, basic_block abnormal_goto_dest,
2455 copy_body_data *id)
2457 basic_block new_bb = (basic_block) bb->aux;
2458 edge_iterator ei;
2459 edge old_edge;
2460 gimple_stmt_iterator si;
2461 bool need_debug_cleanup = false;
2463 /* Use the indices from the original blocks to create edges for the
2464 new ones. */
2465 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2466 if (!(old_edge->flags & EDGE_EH))
2468 edge new_edge;
2469 int flags = old_edge->flags;
2470 location_t locus = old_edge->goto_locus;
2472 /* Return edges do get a FALLTHRU flag when they get inlined. */
2473 if (old_edge->dest->index == EXIT_BLOCK
2474 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2475 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2476 flags |= EDGE_FALLTHRU;
2478 new_edge
2479 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2480 new_edge->probability = old_edge->probability;
2481 if (!id->reset_location)
2482 new_edge->goto_locus = remap_location (locus, id);
2485 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2486 return false;
2488 /* When doing function splitting, we must decrease the count of the return
2489 block which was previously reachable from a block we did not copy. */
2490 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2491 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2492 if (old_edge->src->index != ENTRY_BLOCK
2493 && !old_edge->src->aux)
2494 new_bb->count -= old_edge->count ().apply_scale (num, den);
2496 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2498 gimple *copy_stmt;
2499 bool can_throw, nonlocal_goto;
2501 copy_stmt = gsi_stmt (si);
2502 if (!is_gimple_debug (copy_stmt))
2503 update_stmt (copy_stmt);
2505 /* Do this before the possible split_block. */
2506 gsi_next (&si);
2508 /* If this tree could throw an exception, there are two
2509 cases where we need to add abnormal edge(s): the
2510 tree wasn't in a region and there is a "current
2511 region" in the caller; or the original tree had
2512 EH edges. In both cases split the block after the tree,
2513 and add abnormal edge(s) as needed; we need both
2514 those from the callee and the caller.
2515 We check whether the copy can throw, because the const
2516 propagation can change an INDIRECT_REF which throws
2517 into a COMPONENT_REF which doesn't. If the copy
2518 can throw, the original could also throw. */
2519 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2520 nonlocal_goto
2521 = (stmt_can_make_abnormal_goto (copy_stmt)
2522 && !computed_goto_p (copy_stmt));
2524 if (can_throw || nonlocal_goto)
2526 if (!gsi_end_p (si))
2528 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2529 gsi_next (&si);
2530 if (gsi_end_p (si))
2531 need_debug_cleanup = true;
2533 if (!gsi_end_p (si))
2534 /* Note that bb's predecessor edges aren't necessarily
2535 right at this point; split_block doesn't care. */
2537 edge e = split_block (new_bb, copy_stmt);
2539 new_bb = e->dest;
2540 new_bb->aux = e->src->aux;
2541 si = gsi_start_bb (new_bb);
2545 bool update_probs = false;
2547 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2549 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2550 update_probs = true;
2552 else if (can_throw)
2554 make_eh_edges (copy_stmt);
2555 update_probs = true;
2558 /* EH edges may not match old edges. Copy as much as possible. */
2559 if (update_probs)
2561 edge e;
2562 edge_iterator ei;
2563 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2565 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2566 if ((old_edge->flags & EDGE_EH)
2567 && (e = find_edge (copy_stmt_bb,
2568 (basic_block) old_edge->dest->aux))
2569 && (e->flags & EDGE_EH))
2570 e->probability = old_edge->probability;
2572 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2573 if (e->flags & EDGE_EH)
2575 if (!e->probability.initialized_p ())
2576 e->probability = profile_probability::never ();
2577 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2579 if (id->eh_landing_pad_dest == NULL)
2580 id->eh_landing_pad_dest = e->dest;
2581 else
2582 gcc_assert (id->eh_landing_pad_dest == e->dest);
2588 /* If the call we inline cannot make an abnormal goto, do not add
2589 additional abnormal edges but only retain those already present
2590 in the original function body. */
2591 if (abnormal_goto_dest == NULL)
2592 nonlocal_goto = false;
2593 if (nonlocal_goto)
2595 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2597 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2598 nonlocal_goto = false;
2599 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2600 in OpenMP regions which aren't allowed to be left abnormally.
2601 So, no need to add abnormal edge in that case. */
2602 else if (is_gimple_call (copy_stmt)
2603 && gimple_call_internal_p (copy_stmt)
2604 && (gimple_call_internal_fn (copy_stmt)
2605 == IFN_ABNORMAL_DISPATCHER)
2606 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2607 nonlocal_goto = false;
2608 else
2609 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2610 EDGE_ABNORMAL);
2613 if ((can_throw || nonlocal_goto)
2614 && gimple_in_ssa_p (cfun))
2615 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2616 can_throw, nonlocal_goto);
2618 return need_debug_cleanup;
2621 /* Copy the PHIs. All blocks and edges are copied; some blocks
2622 were possibly split and new outgoing EH edges inserted.
2623 BB points to the block of the original function and AUX pointers link
2624 the original and newly copied blocks. */
2626 static void
2627 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2629 basic_block const new_bb = (basic_block) bb->aux;
2630 edge_iterator ei;
2631 gphi *phi;
2632 gphi_iterator si;
2633 edge new_edge;
2634 bool inserted = false;
2636 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2638 tree res, new_res;
2639 gphi *new_phi;
2641 phi = si.phi ();
2642 res = PHI_RESULT (phi);
2643 new_res = res;
2644 if (!virtual_operand_p (res))
2646 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2647 if (EDGE_COUNT (new_bb->preds) == 0)
2649 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2650 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2652 else
2654 new_phi = create_phi_node (new_res, new_bb);
2655 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2657 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2658 bb);
2659 tree arg;
2660 tree new_arg;
2661 edge_iterator ei2;
2662 location_t locus;
2664 /* When doing partial cloning, we allow PHIs on the entry
2665 block as long as all the arguments are the same.
2666 Find any input edge to see the argument to copy. */
2667 if (!old_edge)
2668 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2669 if (!old_edge->src->aux)
2670 break;
2672 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2673 new_arg = arg;
2674 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2675 gcc_assert (new_arg);
2676 /* With return slot optimization we can end up with
2677 non-gimple (foo *)&this->m, fix that here. */
2678 if (TREE_CODE (new_arg) != SSA_NAME
2679 && TREE_CODE (new_arg) != FUNCTION_DECL
2680 && !is_gimple_val (new_arg))
2682 gimple_seq stmts = NULL;
2683 new_arg = force_gimple_operand (new_arg, &stmts, true,
2684 NULL);
2685 gsi_insert_seq_on_edge (new_edge, stmts);
2686 inserted = true;
2688 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2689 if (id->reset_location)
2690 locus = input_location;
2691 else
2692 locus = remap_location (locus, id);
2693 add_phi_arg (new_phi, new_arg, new_edge, locus);
2699 /* Commit the delayed edge insertions. */
2700 if (inserted)
2701 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2702 gsi_commit_one_edge_insert (new_edge, NULL);
2706 /* Wrapper for remap_decl so it can be used as a callback. */
2708 static tree
2709 remap_decl_1 (tree decl, void *data)
2711 return remap_decl (decl, (copy_body_data *) data);
2714 /* Build the struct function and associated datastructures for the new
2715 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function
2716 changes cfun to the function of NEW_FNDECL (and current_function_decl too). */
2718 static void
2719 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2721 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2723 if (!DECL_ARGUMENTS (new_fndecl))
2724 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2725 if (!DECL_RESULT (new_fndecl))
2726 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2728 /* Register specific tree functions. */
2729 gimple_register_cfg_hooks ();
2731 /* Get clean struct function. */
2732 push_struct_function (new_fndecl);
2734 /* We will rebuild these, so just sanity check that they are empty. */
2735 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2736 gcc_assert (cfun->local_decls == NULL);
2737 gcc_assert (cfun->cfg == NULL);
2738 gcc_assert (cfun->decl == new_fndecl);
2740 /* Copy items we preserve during cloning. */
2741 cfun->static_chain_decl = src_cfun->static_chain_decl;
2742 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2743 cfun->function_end_locus = src_cfun->function_end_locus;
2744 cfun->curr_properties = src_cfun->curr_properties;
2745 cfun->last_verified = src_cfun->last_verified;
2746 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2747 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2748 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2749 cfun->calls_eh_return = src_cfun->calls_eh_return;
2750 cfun->stdarg = src_cfun->stdarg;
2751 cfun->after_inlining = src_cfun->after_inlining;
2752 cfun->can_throw_non_call_exceptions
2753 = src_cfun->can_throw_non_call_exceptions;
2754 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2755 cfun->returns_struct = src_cfun->returns_struct;
2756 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2758 init_empty_tree_cfg ();
2760 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2762 profile_count num = count;
2763 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2764 profile_count::adjust_for_ipa_scaling (&num, &den);
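/* Scale the entry and exit block counts of the clone by the ratio of COUNT
   to the callee's entry count. */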
2766 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2767 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2768 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2769 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2770 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2771 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2772 if (src_cfun->eh)
2773 init_eh_for_function ();
2775 if (src_cfun->gimple_df)
2777 init_tree_ssa (cfun);
2778 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2779 if (cfun->gimple_df->in_ssa_p)
2780 init_ssa_operands (cfun);
2784 /* Helper function for copy_cfg_body. Move debug stmts from the end
2785 of NEW_BB to the beginning of successor basic blocks when needed. If the
2786 successor has multiple predecessors, reset them, otherwise keep
2787 their value. */
2789 static void
2790 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2792 edge e;
2793 edge_iterator ei;
2794 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
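/* Nothing needs to move unless the last non-debug statement must end the
   block (it can throw or make an abnormal goto) and debug statements
   actually follow it. */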
2796 if (gsi_end_p (si)
2797 || gsi_one_before_end_p (si)
2798 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2799 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2800 return;
2802 FOR_EACH_EDGE (e, ei, new_bb->succs)
2804 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2805 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2806 while (is_gimple_debug (gsi_stmt (ssi)))
2808 gimple *stmt = gsi_stmt (ssi);
2809 gdebug *new_stmt;
2810 tree var;
2811 tree value;
2813 /* For the last edge move the debug stmts instead of copying
2814 them. */
2815 if (ei_one_before_end_p (ei))
2817 si = ssi;
2818 gsi_prev (&ssi);
2819 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2821 gimple_debug_bind_reset_value (stmt);
2822 gimple_set_location (stmt, UNKNOWN_LOCATION);
2824 gsi_remove (&si, false);
2825 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2826 continue;
2829 if (gimple_debug_bind_p (stmt))
2831 var = gimple_debug_bind_get_var (stmt);
2832 if (single_pred_p (e->dest))
2834 value = gimple_debug_bind_get_value (stmt);
2835 value = unshare_expr (value);
2836 new_stmt = gimple_build_debug_bind (var, value, stmt);
2838 else
2839 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2841 else if (gimple_debug_source_bind_p (stmt))
2843 var = gimple_debug_source_bind_get_var (stmt);
2844 value = gimple_debug_source_bind_get_value (stmt);
2845 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2847 else if (gimple_debug_nonbind_marker_p (stmt))
2848 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2849 else
2850 gcc_unreachable ();
2851 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2852 id->debug_stmts.safe_push (new_stmt);
2853 gsi_prev (&ssi);
2858 /* Make a copy of the sub-loops of SRC_PARENT and place them
2859 as children of DEST_PARENT. */
2861 static void
2862 copy_loops (copy_body_data *id,
2863 class loop *dest_parent, class loop *src_parent)
2865 class loop *src_loop = src_parent->inner;
2866 while (src_loop)
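/* During partial cloning only copy loops whose header block is part of
   the copied region. */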
2868 if (!id->blocks_to_copy
2869 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2871 class loop *dest_loop = alloc_loop ();
2873 /* Assign the new loop its header and latch and associate
2874 those with the new loop. */
2875 dest_loop->header = (basic_block)src_loop->header->aux;
2876 dest_loop->header->loop_father = dest_loop;
2877 if (src_loop->latch != NULL)
2879 dest_loop->latch = (basic_block)src_loop->latch->aux;
2880 dest_loop->latch->loop_father = dest_loop;
2883 /* Copy loop meta-data. */
2884 copy_loop_info (src_loop, dest_loop);
2885 if (dest_loop->unroll)
2886 cfun->has_unroll = true;
2887 if (dest_loop->force_vectorize)
2888 cfun->has_force_vectorize_loops = true;
2889 if (id->src_cfun->last_clique != 0)
2890 dest_loop->owned_clique
2891 = remap_dependence_clique (id,
2892 src_loop->owned_clique
2893 ? src_loop->owned_clique : 1);
2895 /* Finally place it into the loop array and the loop tree. */
2896 place_new_loop (cfun, dest_loop);
2897 flow_loop_tree_node_add (dest_parent, dest_loop);
2899 if (src_loop->simduid)
2901 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2902 cfun->has_simduid_loops = true;
2905 /* Recurse. */
2906 copy_loops (id, dest_loop, src_loop);
2908 src_loop = src_loop->next;
2912 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2914 void
2915 redirect_all_calls (copy_body_data * id, basic_block bb)
2917 gimple_stmt_iterator si;
2918 gimple *last = last_stmt (bb);
2919 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2921 gimple *stmt = gsi_stmt (si);
2922 if (is_gimple_call (stmt))
2924 tree old_lhs = gimple_call_lhs (stmt);
2925 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2926 if (edge)
2928 gimple *new_stmt
2929 = cgraph_edge::redirect_call_stmt_to_callee (edge);
2930 /* If the IPA-SRA transformation, run as part of edge redirection,
2931 removed the LHS because it is unused, save it to
2932 killed_new_ssa_names so that we can prune it from debug
2933 statements. */
2934 if (old_lhs
2935 && TREE_CODE (old_lhs) == SSA_NAME
2936 && !gimple_call_lhs (new_stmt))
2938 if (!id->killed_new_ssa_names)
2939 id->killed_new_ssa_names = new hash_set<tree> (16);
2940 id->killed_new_ssa_names->add (old_lhs);
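/* If redirection changed whether the last statement of the block can
   throw, remove any EH edges that are now dead. */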
2943 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2944 gimple_purge_dead_eh_edges (bb);
2950 /* Make a copy of the body of FN so that it can be inserted inline in
2951 another function. Walks FN via CFG, returns new fndecl. */
2953 static tree
2954 copy_cfg_body (copy_body_data * id,
2955 basic_block entry_block_map, basic_block exit_block_map,
2956 basic_block new_entry)
2958 tree callee_fndecl = id->src_fn;
2959 /* Original cfun for the callee, doesn't change. */
2960 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2961 struct function *cfun_to_copy;
2962 basic_block bb;
2963 tree new_fndecl = NULL;
2964 bool need_debug_cleanup = false;
2965 int last;
2966 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2967 profile_count num = entry_block_map->count;
2969 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2971 /* Register specific tree functions. */
2972 gimple_register_cfg_hooks ();
2974 /* If we are inlining just a region of the function, make sure to connect
2975 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
2976 part of a loop, we must compute the frequency and probability of
2977 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2978 probabilities of edges incoming from the nonduplicated region. */
2979 if (new_entry)
2981 edge e;
2982 edge_iterator ei;
2983 den = profile_count::zero ();
2985 FOR_EACH_EDGE (e, ei, new_entry->preds)
2986 if (!e->src->aux)
2987 den += e->count ();
2988 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2991 profile_count::adjust_for_ipa_scaling (&num, &den);
2993 /* Must have a CFG here at this point. */
2994 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2995 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2998 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2999 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3000 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3001 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3003 /* Duplicate any exception-handling regions. */
3004 if (cfun->eh)
3005 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3006 remap_decl_1, id);
3008 /* Use aux pointers to map the original blocks to their copies. */
3009 FOR_EACH_BB_FN (bb, cfun_to_copy)
3010 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3012 basic_block new_bb = copy_bb (id, bb, num, den);
3013 bb->aux = new_bb;
3014 new_bb->aux = bb;
3015 new_bb->loop_father = entry_block_map->loop_father;
3018 last = last_basic_block_for_fn (cfun);
3020 /* Now that we've duplicated the blocks, duplicate their edges. */
3021 basic_block abnormal_goto_dest = NULL;
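/* If the call being inlined can make an abnormal goto and is the last
   statement of its block, the copied statements inherit its abnormal
   successor (the caller's abnormal dispatcher block). */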
3022 if (id->call_stmt
3023 && stmt_can_make_abnormal_goto (id->call_stmt))
3025 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3027 bb = gimple_bb (id->call_stmt);
3028 gsi_next (&gsi);
3029 if (gsi_end_p (gsi))
3030 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3032 FOR_ALL_BB_FN (bb, cfun_to_copy)
3033 if (!id->blocks_to_copy
3034 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3035 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3036 abnormal_goto_dest, id);
3038 if (id->eh_landing_pad_dest)
3040 add_clobbers_to_eh_landing_pad (id);
3041 id->eh_landing_pad_dest = NULL;
3044 if (new_entry)
3046 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3047 EDGE_FALLTHRU);
3048 e->probability = profile_probability::always ();
3051 /* Duplicate the loop tree, if available and wanted. */
3052 if (loops_for_fn (src_cfun) != NULL
3053 && current_loops != NULL)
3055 copy_loops (id, entry_block_map->loop_father,
3056 get_loop (src_cfun, 0));
3057 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
3058 loops_state_set (LOOPS_NEED_FIXUP);
3061 /* If the loop tree in the source function needed fixup, mark the
3062 destination loop tree for fixup, too. */
3063 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3064 loops_state_set (LOOPS_NEED_FIXUP);
3066 if (gimple_in_ssa_p (cfun))
3067 FOR_ALL_BB_FN (bb, cfun_to_copy)
3068 if (!id->blocks_to_copy
3069 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3070 copy_phis_for_bb (bb, id);
3072 FOR_ALL_BB_FN (bb, cfun_to_copy)
3073 if (bb->aux)
3075 if (need_debug_cleanup
3076 && bb->index != ENTRY_BLOCK
3077 && bb->index != EXIT_BLOCK)
3078 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3079 /* Update call edge destinations. This cannot be done before loop
3080 info is updated, because we may split basic blocks. */
3081 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3082 && bb->index != ENTRY_BLOCK
3083 && bb->index != EXIT_BLOCK)
3084 redirect_all_calls (id, (basic_block)bb->aux);
3085 ((basic_block)bb->aux)->aux = NULL;
3086 bb->aux = NULL;
3089 /* Zero out AUX fields of the blocks newly created during EH edge
3090 insertion. */
3091 for (; last < last_basic_block_for_fn (cfun); last++)
3093 if (need_debug_cleanup)
3094 maybe_move_debug_stmts_to_successors (id,
3095 BASIC_BLOCK_FOR_FN (cfun, last));
3096 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3097 /* Update call edge destinations. This cannot be done before loop
3098 info is updated, because we may split basic blocks. */
3099 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3100 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3102 entry_block_map->aux = NULL;
3103 exit_block_map->aux = NULL;
3105 if (id->eh_map)
3107 delete id->eh_map;
3108 id->eh_map = NULL;
3110 if (id->dependence_map)
3112 delete id->dependence_map;
3113 id->dependence_map = NULL;
3116 return new_fndecl;
3119 /* Copy the debug STMT using ID. We deal with these statements in a
3120 special way: if any variable in their VALUE expression wasn't
3121 remapped yet, we won't remap it, because that would get decl uids
3122 out of sync, causing codegen differences between -g and -g0. If
3123 this arises, we drop the VALUE expression altogether. */
3125 static void
3126 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3128 tree t, *n;
3129 struct walk_stmt_info wi;
3131 if (tree block = gimple_block (stmt))
3133 n = id->decl_map->get (block);
3134 gimple_set_block (stmt, n ? *n : id->block);
3137 if (gimple_debug_nonbind_marker_p (stmt))
3138 return;
3140 /* Remap all the operands in COPY. */
3141 memset (&wi, 0, sizeof (wi));
3142 wi.info = id;
3144 processing_debug_stmt = 1;
3146 if (gimple_debug_source_bind_p (stmt))
3147 t = gimple_debug_source_bind_get_var (stmt);
3148 else if (gimple_debug_bind_p (stmt))
3149 t = gimple_debug_bind_get_var (stmt);
3150 else
3151 gcc_unreachable ();
3153 if (TREE_CODE (t) == PARM_DECL && id->debug_map
3154 && (n = id->debug_map->get (t)))
3156 gcc_assert (VAR_P (*n));
3157 t = *n;
3159 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3160 /* T is a non-localized variable. */;
3161 else
3162 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3164 if (gimple_debug_bind_p (stmt))
3166 gimple_debug_bind_set_var (stmt, t);
3168 if (gimple_debug_bind_has_value_p (stmt))
3169 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3170 remap_gimple_op_r, &wi, NULL);
3172 /* Punt if any decl couldn't be remapped. */
3173 if (processing_debug_stmt < 0)
3174 gimple_debug_bind_reset_value (stmt);
3176 else if (gimple_debug_source_bind_p (stmt))
3178 gimple_debug_source_bind_set_var (stmt, t);
3179 /* When inlining and source bind refers to one of the optimized
3180 away parameters, change the source bind into normal debug bind
3181 referring to the corresponding DEBUG_EXPR_DECL that should have
3182 been bound before the call stmt. */
3183 t = gimple_debug_source_bind_get_value (stmt);
3184 if (t != NULL_TREE
3185 && TREE_CODE (t) == PARM_DECL
3186 && id->call_stmt)
3188 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3189 unsigned int i;
3190 if (debug_args != NULL)
3192 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3193 if ((**debug_args)[i] == DECL_ORIGIN (t)
3194 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3196 t = (**debug_args)[i + 1];
3197 stmt->subcode = GIMPLE_DEBUG_BIND;
3198 gimple_debug_bind_set_value (stmt, t);
3199 break;
3203 if (gimple_debug_source_bind_p (stmt))
3204 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3205 remap_gimple_op_r, &wi, NULL);
3208 processing_debug_stmt = 0;
3210 update_stmt (stmt);
3213 /* Process deferred debug stmts. In order to give values better odds
3214 of being successfully remapped, we delay the processing of debug
3215 stmts until all other stmts that might require remapping are
3216 processed. */
3218 static void
3219 copy_debug_stmts (copy_body_data *id)
3221 size_t i;
3222 gdebug *stmt;
3224 if (!id->debug_stmts.exists ())
3225 return;
3227 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3228 copy_debug_stmt (stmt, id);
3230 id->debug_stmts.release ();
3233 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3234 another function. */
3236 static tree
3237 copy_tree_body (copy_body_data *id)
3239 tree fndecl = id->src_fn;
3240 tree body = DECL_SAVED_TREE (fndecl);
3242 walk_tree (&body, copy_tree_body_r, id, NULL);
3244 return body;
3247 /* Make a copy of the body of FN so that it can be inserted inline in
3248 another function. */
3250 static tree
3251 copy_body (copy_body_data *id,
3252 basic_block entry_block_map, basic_block exit_block_map,
3253 basic_block new_entry)
3255 tree fndecl = id->src_fn;
3256 tree body;
3258 /* If this body has a CFG, walk CFG and copy. */
3259 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3260 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3261 new_entry);
3262 copy_debug_stmts (id);
3263 delete id->killed_new_ssa_names;
3264 id->killed_new_ssa_names = NULL;
3266 return body;
3269 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3270 defined in function FN, or of a data member thereof. */
3272 static bool
3273 self_inlining_addr_expr (tree value, tree fn)
3275 tree var;
3277 if (TREE_CODE (value) != ADDR_EXPR)
3278 return false;
3280 var = get_base_address (TREE_OPERAND (value, 0));
3282 return var && auto_var_in_fn_p (var, fn);
3285 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3286 lexical block and line number information from base_stmt, if given,
3287 or from the last stmt of the block otherwise. */
3289 static gimple *
3290 insert_init_debug_bind (copy_body_data *id,
3291 basic_block bb, tree var, tree value,
3292 gimple *base_stmt)
3294 gimple *note;
3295 gimple_stmt_iterator gsi;
3296 tree tracked_var;
3298 if (!gimple_in_ssa_p (id->src_cfun))
3299 return NULL;
3301 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3302 return NULL;
3304 tracked_var = target_for_debug_bind (var);
3305 if (!tracked_var)
3306 return NULL;
3308 if (bb)
3310 gsi = gsi_last_bb (bb);
3311 if (!base_stmt && !gsi_end_p (gsi))
3312 base_stmt = gsi_stmt (gsi);
3315 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3317 if (bb)
3319 if (!gsi_end_p (gsi))
3320 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3321 else
3322 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3325 return note;
3328 static void
3329 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3331 /* If VAR represents a zero-sized variable, it's possible that the
3332 assignment statement may result in no gimple statements. */
3333 if (init_stmt)
3335 gimple_stmt_iterator si = gsi_last_bb (bb);
3337 /* We can end up with init statements that store to a non-register
3338 from a rhs with a conversion. Handle that here by forcing the
3339 rhs into a temporary. gimple_regimplify_operands is not
3340 prepared to do this for us. */
3341 if (!is_gimple_debug (init_stmt)
3342 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3343 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3344 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3346 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3347 gimple_expr_type (init_stmt),
3348 gimple_assign_rhs1 (init_stmt));
3349 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3350 GSI_NEW_STMT);
3351 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3352 gimple_assign_set_rhs1 (init_stmt, rhs);
3354 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3355 gimple_regimplify_operands (init_stmt, &si);
3357 if (!is_gimple_debug (init_stmt))
3359 tree def = gimple_assign_lhs (init_stmt);
3360 insert_init_debug_bind (id, bb, def, def, init_stmt);
3365 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3366 if need be (which should only be necessary for invalid programs). Attempt
3367 to convert VALUE to TYPE and return the result if that is possible; just
3368 return a zero constant of the given type if it fails. */
3370 tree
3371 force_value_to_type (tree type, tree value)
3373 /* If we can match up types by promotion/demotion do so. */
3374 if (fold_convertible_p (type, value))
3375 return fold_convert (type, value);
3377 /* ??? For valid programs we should not end up here.
3378 Still if we end up with truly mismatched types here, fall back
3379 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3380 GIMPLE to the following passes. */
3381 if (!is_gimple_reg_type (TREE_TYPE (value))
3382 || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3383 return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3384 else
3385 return build_zero_cst (type);
3388 /* Initialize parameter P with VALUE. If needed, produce the init statement
3389 at the end of BB. When BB is NULL, we return the init statement to be
3390 output later. */
3391 static gimple *
3392 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3393 basic_block bb, tree *vars)
3395 gimple *init_stmt = NULL;
3396 tree var;
3397 tree rhs = value;
3398 tree def = (gimple_in_ssa_p (cfun)
3399 ? ssa_default_def (id->src_cfun, p) : NULL);
3401 if (value
3402 && value != error_mark_node
3403 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3404 rhs = force_value_to_type (TREE_TYPE (p), value);
3406 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3407 here since the type of this decl must be visible to the calling
3408 function. */
3409 var = copy_decl_to_var (p, id);
3411 /* Declare this new variable. */
3412 DECL_CHAIN (var) = *vars;
3413 *vars = var;
3415 /* Make gimplifier happy about this variable. */
3416 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3418 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3419 we would not need to create a new variable here at all, if it
3420 weren't for debug info. Still, we can just use the argument
3421 value. */
3422 if (TREE_READONLY (p)
3423 && !TREE_ADDRESSABLE (p)
3424 && value && !TREE_SIDE_EFFECTS (value)
3425 && !def)
3427 /* We may produce non-gimple trees by adding NOPs or introduce
3428 invalid sharing when the operand is not really constant.
3429 It is not a big deal to prohibit constant propagation here as
3430 we will constant propagate in the DOM1 pass anyway. */
3431 if (is_gimple_min_invariant (value)
3432 && useless_type_conversion_p (TREE_TYPE (p),
3433 TREE_TYPE (value))
3434 /* We have to be very careful about ADDR_EXPR. Make sure
3435 the base variable isn't a local variable of the inlined
3436 function, e.g., when doing recursive inlining, direct or
3437 mutually-recursive or whatever, which is why we don't
3438 just test whether fn == current_function_decl. */
3439 && ! self_inlining_addr_expr (value, fn))
3441 insert_decl_map (id, p, value);
3442 insert_debug_decl_map (id, p, var);
3443 return insert_init_debug_bind (id, bb, var, value, NULL);
3447 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3448 that way, when the PARM_DECL is encountered, it will be
3449 automatically replaced by the VAR_DECL. */
3450 insert_decl_map (id, p, var);
3452 /* Even if P was TREE_READONLY, the new VAR should not be.
3453 In the original code, we would have constructed a
3454 temporary, and then the function body would have never
3455 changed the value of P. However, now, we will be
3456 constructing VAR directly. The constructor body may
3457 change its value multiple times as it is being
3458 constructed. Therefore, it must not be TREE_READONLY;
3459 the back-end assumes that a TREE_READONLY variable is
3460 assigned to only once. */
3461 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3462 TREE_READONLY (var) = 0;
3464 /* If there is no setup required and we are in SSA, take the easy route
3465 replacing all SSA names representing the function parameter by the
3466 SSA name passed to the function.
3468 We need to construct a map for the variable anyway as it might be used
3469 in different SSA names when the parameter is set in the function.
3471 Do the replacement at -O0 for const arguments replaced by a constant.
3472 This is important for builtin_constant_p and other constructs requiring
3473 a constant argument to be visible in the inlined function body. */
3474 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3475 && (optimize
3476 || (TREE_READONLY (p)
3477 && is_gimple_min_invariant (rhs)))
3478 && (TREE_CODE (rhs) == SSA_NAME
3479 || is_gimple_min_invariant (rhs))
3480 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3482 insert_decl_map (id, def, rhs);
3483 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3486 /* If the value of the argument is never used, don't bother initializing
3487 it. */
3488 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3490 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3491 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3494 /* Initialize this VAR_DECL from the equivalent argument. Convert
3495 the argument to the proper type in case it was promoted. */
3496 if (value)
3498 if (rhs == error_mark_node)
3500 insert_decl_map (id, p, var);
3501 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3504 STRIP_USELESS_TYPE_CONVERSION (rhs);
3506 /* If we are in SSA form, properly remap the default definition
3507 or assign to a dummy SSA name if the parameter is unused and
3508 we are not optimizing. */
3509 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3511 if (def)
3513 def = remap_ssa_name (def, id);
3514 init_stmt = gimple_build_assign (def, rhs);
3515 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3516 set_ssa_default_def (cfun, var, NULL);
3518 else if (!optimize)
3520 def = make_ssa_name (var);
3521 init_stmt = gimple_build_assign (def, rhs);
3524 else
3525 init_stmt = gimple_build_assign (var, rhs);
3527 if (bb && init_stmt)
3528 insert_init_stmt (id, bb, init_stmt);
3530 return init_stmt;
3533 /* Generate code to initialize the parameters of the function at the
3534 top of the stack in ID from the GIMPLE_CALL STMT. */
3536 static void
3537 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3538 tree fn, basic_block bb)
3540 tree parms;
3541 size_t i;
3542 tree p;
3543 tree vars = NULL_TREE;
3544 tree static_chain = gimple_call_chain (stmt);
3546 /* Figure out what the parameters are. */
3547 parms = DECL_ARGUMENTS (fn);
3549 /* Loop through the parameter declarations, replacing each with an
3550 equivalent VAR_DECL, appropriately initialized. */
3551 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3553 tree val;
3554 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3555 setup_one_parameter (id, p, val, fn, bb, &vars);
3557 /* After remapping parameters remap their types. This has to be done
3558 in a second loop over all parameters to appropriately remap
3559 variable sized arrays when the size is specified in a
3560 parameter following the array. */
3561 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3563 tree *varp = id->decl_map->get (p);
3564 if (varp && VAR_P (*varp))
3566 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3567 ? ssa_default_def (id->src_cfun, p) : NULL);
3568 tree var = *varp;
3569 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3570 /* Also remap the default definition if it was remapped
3571 to the default definition of the parameter replacement
3572 by the parameter setup. */
3573 if (def)
3575 tree *defp = id->decl_map->get (def);
3576 if (defp
3577 && TREE_CODE (*defp) == SSA_NAME
3578 && SSA_NAME_VAR (*defp) == var)
3579 TREE_TYPE (*defp) = TREE_TYPE (var);
3584 /* Initialize the static chain. */
3585 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3586 gcc_assert (fn != current_function_decl);
3587 if (p)
3589 /* No static chain? Seems like a bug in tree-nested.c. */
3590 gcc_assert (static_chain);
3592 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3595 declare_inline_vars (id->block, vars);
3599 /* Declare a return variable to replace the RESULT_DECL for the
3600 function we are calling. An appropriate DECL_STMT is returned.
3601 The USE_STMT is filled to contain a use of the declaration to
3602 indicate the return value of the function.
3604 RETURN_SLOT, if non-null, is the place where to store the result. It
3605 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3606 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3608 The return value is a (possibly null) value that holds the result
3609 as seen by the caller. */
3611 static tree
3612 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3613 basic_block entry_bb)
3615 tree callee = id->src_fn;
3616 tree result = DECL_RESULT (callee);
3617 tree callee_type = TREE_TYPE (result);
3618 tree caller_type;
3619 tree var, use;
3621 /* Handle type-mismatches in the function declaration return type
3622 vs. the call expression. */
3623 if (modify_dest)
3624 caller_type = TREE_TYPE (modify_dest);
3625 else if (return_slot)
3626 caller_type = TREE_TYPE (return_slot);
3627 else /* No LHS on the call. */
3628 caller_type = TREE_TYPE (TREE_TYPE (callee));
3630 /* We don't need to do anything for functions that don't return anything. */
3631 if (VOID_TYPE_P (callee_type))
3632 return NULL_TREE;
3634 /* If there was a return slot, then the return value is the
3635 dereferenced address of that object. */
3636 if (return_slot)
3638 /* The front end shouldn't have used both return_slot and
3639 a modify expression. */
3640 gcc_assert (!modify_dest);
3641 if (DECL_BY_REFERENCE (result))
3643 tree return_slot_addr = build_fold_addr_expr (return_slot);
3644 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3646 /* We are going to construct *&return_slot and we can't do that
3647 for variables believed to not be addressable.
3649 FIXME: This check can possibly match, because values returned
3650 via return slot optimization are not believed to have their
3651 address taken by alias analysis. */
3652 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3653 var = return_slot_addr;
3654 mark_addressable (return_slot);
3656 else
3658 var = return_slot;
3659 gcc_assert (TREE_CODE (var) != SSA_NAME);
3660 if (TREE_ADDRESSABLE (result))
3661 mark_addressable (var);
3663 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3664 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3665 && !DECL_GIMPLE_REG_P (result)
3666 && DECL_P (var))
3667 DECL_GIMPLE_REG_P (var) = 0;
3669 if (!useless_type_conversion_p (callee_type, caller_type))
3670 var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3672 use = NULL;
3673 goto done;
3676 /* All types requiring non-trivial constructors should have been handled. */
3677 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3679 /* Attempt to avoid creating a new temporary variable. */
3680 if (modify_dest
3681 && TREE_CODE (modify_dest) != SSA_NAME)
3683 bool use_it = false;
3685 /* We can't use MODIFY_DEST if there's type promotion involved. */
3686 if (!useless_type_conversion_p (callee_type, caller_type))
3687 use_it = false;
3689 /* ??? If we're assigning to a variable sized type, then we must
3690 reuse the destination variable, because we've no good way to
3691 create variable sized temporaries at this point. */
3692 else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3693 use_it = true;
3695 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3696 reuse it as the result of the call directly. Don't do this if
3697 it would promote MODIFY_DEST to addressable. */
3698 else if (TREE_ADDRESSABLE (result))
3699 use_it = false;
3700 else
3702 tree base_m = get_base_address (modify_dest);
3704 /* If the base isn't a decl, then it's a pointer, and we don't
3705 know where that's going to go. */
3706 if (!DECL_P (base_m))
3707 use_it = false;
3708 else if (is_global_var (base_m))
3709 use_it = false;
3710 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3711 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3712 && !DECL_GIMPLE_REG_P (result)
3713 && DECL_GIMPLE_REG_P (base_m))
3714 use_it = false;
3715 else if (!TREE_ADDRESSABLE (base_m))
3716 use_it = true;
3719 if (use_it)
3721 var = modify_dest;
3722 use = NULL;
3723 goto done;
3727 gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3729 var = copy_result_decl_to_var (result, id);
3730 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3732 /* Do not have the rest of GCC warn about this variable as it should
3733 not be visible to the user. */
3734 TREE_NO_WARNING (var) = 1;
3736 declare_inline_vars (id->block, var);
3738 /* Build the use expr. If the return type of the function was
3739 promoted, convert it back to the expected type. */
3740 use = var;
3741 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3743 /* If we can match up types by promotion/demotion do so. */
3744 if (fold_convertible_p (caller_type, var))
3745 use = fold_convert (caller_type, var);
3746 else
3748 /* ??? For valid programs we should not end up here.
3749 Still if we end up with truly mismatched types here, fall back
3750 to using a MEM_REF to not leak invalid GIMPLE to the following
3751 passes. */
3752 /* Prevent var from being written into SSA form. */
3753 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3754 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3755 DECL_GIMPLE_REG_P (var) = false;
3756 else if (is_gimple_reg_type (TREE_TYPE (var)))
3757 TREE_ADDRESSABLE (var) = true;
3758 use = fold_build2 (MEM_REF, caller_type,
3759 build_fold_addr_expr (var),
3760 build_int_cst (ptr_type_node, 0));
3764 STRIP_USELESS_TYPE_CONVERSION (use);
3766 if (DECL_BY_REFERENCE (result))
3768 TREE_ADDRESSABLE (var) = 1;
3769 var = build_fold_addr_expr (var);
3772 done:
3773 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3774 way, when the RESULT_DECL is encountered, it will be
3775 automatically replaced by the VAR_DECL.
3777 When returning by reference, ensure that RESULT_DECL remaps to
3778 gimple_val. */
3779 if (DECL_BY_REFERENCE (result)
3780 && !is_gimple_val (var))
3782 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3783 insert_decl_map (id, result, temp);
3784 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3785 its default_def SSA_NAME. */
3786 if (gimple_in_ssa_p (id->src_cfun)
3787 && is_gimple_reg (result))
3789 temp = make_ssa_name (temp);
3790 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3792 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3794 else
3795 insert_decl_map (id, result, var);
3797 /* Remember this so we can ignore it in remap_decls. */
3798 id->retvar = var;
3799 return use;
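/* For illustration (the caller, callee and variable names here are
   hypothetical), if the caller contains

     x = foo (a);

   then after inlining foo, whose body ends in "return <expr>;", the copied
   body stores <expr> into the variable created above (call it retval.N,
   standing in for foo's RESULT_DECL), and expand_call_inline later replaces
   the call statement with

     x = retval.N;

   built from the USE value returned here.  */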
3802 /* Determine if the function can be copied. If so return NULL. If
3803 not return a string describing the reason for failure. */
3805 const char *
3806 copy_forbidden (struct function *fun)
3808 const char *reason = fun->cannot_be_copied_reason;
3810 /* Only examine the function once. */
3811 if (fun->cannot_be_copied_set)
3812 return reason;
3814 /* We cannot copy a function that receives a non-local goto
3815 because we cannot remap the destination label used in the
3816 function that is performing the non-local goto. */
3817 /* ??? Actually, this should be possible, if we work at it.
3818 No doubt there's just a handful of places that simply
3819 assume it doesn't happen and don't substitute properly. */
3820 if (fun->has_nonlocal_label)
3822 reason = G_("function %q+F can never be copied "
3823 "because it receives a non-local goto");
3824 goto fail;
3827 if (fun->has_forced_label_in_static)
3829 reason = G_("function %q+F can never be copied because it saves "
3830 "address of local label in a static variable");
3831 goto fail;
3834 fail:
3835 fun->cannot_be_copied_reason = reason;
3836 fun->cannot_be_copied_set = true;
3837 return reason;
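/* For example, using the GNU nested-function extension (identifiers below
   are purely illustrative), a function such as

     int
     outer (int *array, int index, int size)
     {
       __label__ failure;
       int access (int i)
       {
         if (i >= size)
           goto failure;
         return array[i];
       }
       return access (index);
      failure:
       return -1;
     }

   receives a non-local goto from its nested function, so copy_forbidden
   returns a non-NULL reason for it and it can never be inlined, cloned or
   versioned.  */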
3841 static const char *inline_forbidden_reason;
3843 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3844 iff a function cannot be inlined. Also sets the reason why. */
3846 static tree
3847 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3848 struct walk_stmt_info *wip)
3850 tree fn = (tree) wip->info;
3851 tree t;
3852 gimple *stmt = gsi_stmt (*gsi);
3854 switch (gimple_code (stmt))
3856 case GIMPLE_CALL:
3857 /* Refuse to inline an alloca call unless the user explicitly forced it,
3858 as this may drastically change the program's memory overhead when the
3859 function using alloca is called in a loop. In the GCC sources included
3860 in SPEC2000, inlining into schedule_block caused it to require 2GB of
3861 RAM instead of 256MB. Don't do so for alloca calls emitted for
3862 VLA objects, as those can't cause unbounded growth (they're always
3863 wrapped inside stack_save/stack_restore regions). */
3864 if (gimple_maybe_alloca_call_p (stmt)
3865 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3866 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3868 inline_forbidden_reason
3869 = G_("function %q+F can never be inlined because it uses "
3870 "alloca (override using the always_inline attribute)");
3871 *handled_ops_p = true;
3872 return fn;
3875 t = gimple_call_fndecl (stmt);
3876 if (t == NULL_TREE)
3877 break;
3879 /* We cannot inline functions that call setjmp. */
3880 if (setjmp_call_p (t))
3882 inline_forbidden_reason
3883 = G_("function %q+F can never be inlined because it uses setjmp");
3884 *handled_ops_p = true;
3885 return t;
3888 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3889 switch (DECL_FUNCTION_CODE (t))
3891 /* We cannot inline functions that take a variable number of
3892 arguments. */
3893 case BUILT_IN_VA_START:
3894 case BUILT_IN_NEXT_ARG:
3895 case BUILT_IN_VA_END:
3896 inline_forbidden_reason
3897 = G_("function %q+F can never be inlined because it "
3898 "uses variable argument lists");
3899 *handled_ops_p = true;
3900 return t;
3902 case BUILT_IN_LONGJMP:
3903 /* We can't inline functions that call __builtin_longjmp at
3904 all. The non-local goto machinery really requires the
3905 destination be in a different function. If we allow the
3906 function calling __builtin_longjmp to be inlined into the
3907 function calling __builtin_setjmp, Things will Go Awry. */
3908 inline_forbidden_reason
3909 = G_("function %q+F can never be inlined because "
3910 "it uses setjmp-longjmp exception handling");
3911 *handled_ops_p = true;
3912 return t;
3914 case BUILT_IN_NONLOCAL_GOTO:
3915 /* Similarly. */
3916 inline_forbidden_reason
3917 = G_("function %q+F can never be inlined because "
3918 "it uses non-local goto");
3919 *handled_ops_p = true;
3920 return t;
3922 case BUILT_IN_RETURN:
3923 case BUILT_IN_APPLY_ARGS:
3924 /* If a __builtin_apply_args caller would be inlined,
3925 it would be saving arguments of the function it has
3926 been inlined into. Similarly __builtin_return would
3927 return from the function the inline has been inlined into. */
3928 inline_forbidden_reason
3929 = G_("function %q+F can never be inlined because "
3930 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3931 *handled_ops_p = true;
3932 return t;
3934 default:
3935 break;
3937 break;
3939 case GIMPLE_GOTO:
3940 t = gimple_goto_dest (stmt);
3942 /* We will not inline a function which uses computed goto. The
3943 addresses of its local labels, which may be tucked into
3944 global storage, are of course not constant across
3945 instantiations, which causes unexpected behavior. */
3946 if (TREE_CODE (t) != LABEL_DECL)
3948 inline_forbidden_reason
3949 = G_("function %q+F can never be inlined "
3950 "because it contains a computed goto");
3951 *handled_ops_p = true;
3952 return t;
3954 break;
3956 default:
3957 break;
3960 *handled_ops_p = false;
3961 return NULL_TREE;
3964 /* Return true if FNDECL is a function that cannot be inlined into
3965 another one. */
3967 static bool
3968 inline_forbidden_p (tree fndecl)
3970 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3971 struct walk_stmt_info wi;
3972 basic_block bb;
3973 bool forbidden_p = false;
3975 /* First check for shared reasons not to copy the code. */
3976 inline_forbidden_reason = copy_forbidden (fun);
3977 if (inline_forbidden_reason != NULL)
3978 return true;
3980 /* Next, walk the statements of the function looking for
3981 constructs we can't handle or that are non-optimal for inlining. */
3982 hash_set<tree> visited_nodes;
3983 memset (&wi, 0, sizeof (wi));
3984 wi.info = (void *) fndecl;
3985 wi.pset = &visited_nodes;
3987 FOR_EACH_BB_FN (bb, fun)
3989 gimple *ret;
3990 gimple_seq seq = bb_seq (bb);
3991 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3992 forbidden_p = (ret != NULL);
3993 if (forbidden_p)
3994 break;
3997 return forbidden_p;
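/* For instance, each of the following (purely illustrative) functions trips
   one of the checks in inline_forbidden_p_stmt above; jmp_buf is assumed to
   come from <setjmp.h>:

     void f (unsigned n)
     { char *buf = __builtin_alloca (n); __builtin_memset (buf, 0, n); }

     int g (jmp_buf env) { return setjmp (env); }

     void h (void *p) { goto *p; }

   f uses alloca (and is only accepted when marked always_inline), g calls
   setjmp, and h contains a computed goto.  */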
4000 /* Return false if the function FNDECL cannot be inlined on account of its
4001 attributes, true otherwise. */
4002 static bool
4003 function_attribute_inlinable_p (const_tree fndecl)
4005 if (targetm.attribute_table)
4007 const_tree a;
4009 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
4011 const_tree name = get_attribute_name (a);
4012 int i;
4014 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
4015 if (is_attribute_p (targetm.attribute_table[i].name, name))
4016 return targetm.function_attribute_inlinable_p (fndecl);
4020 return true;
4023 /* Returns nonzero if FN is a function that does not have any
4024 fundamental inline blocking properties. */
4026 bool
4027 tree_inlinable_function_p (tree fn)
4029 bool inlinable = true;
4030 bool do_warning;
4031 tree always_inline;
4033 /* If we've already decided this function shouldn't be inlined,
4034 there's no need to check again. */
4035 if (DECL_UNINLINABLE (fn))
4036 return false;
4038 /* We only warn for functions declared `inline' by the user. */
4039 do_warning = (opt_for_fn (fn, warn_inline)
4040 && DECL_DECLARED_INLINE_P (fn)
4041 && !DECL_NO_INLINE_WARNING_P (fn)
4042 && !DECL_IN_SYSTEM_HEADER (fn));
4044 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4046 if (flag_no_inline
4047 && always_inline == NULL)
4049 if (do_warning)
4050 warning (OPT_Winline, "function %q+F can never be inlined because it "
4051 "is suppressed using %<-fno-inline%>", fn);
4052 inlinable = false;
4055 else if (!function_attribute_inlinable_p (fn))
4057 if (do_warning)
4058 warning (OPT_Winline, "function %q+F can never be inlined because it "
4059 "uses attributes conflicting with inlining", fn);
4060 inlinable = false;
4063 else if (inline_forbidden_p (fn))
4065 /* See if we should warn about uninlinable functions. Previously,
4066 some of these warnings would be issued while trying to expand
4067 the function inline, but that would cause multiple warnings
4068 about functions that would for example call alloca. But since
4069 this is a property of the function, just one warning is enough.
4070 As a bonus we can now give more details about the reason why a
4071 function is not inlinable. */
4072 if (always_inline)
4073 error (inline_forbidden_reason, fn);
4074 else if (do_warning)
4075 warning (OPT_Winline, inline_forbidden_reason, fn);
4077 inlinable = false;
4080 /* Squirrel away the result so that we don't have to check again. */
4081 DECL_UNINLINABLE (fn) = !inlinable;
4083 return inlinable;
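/* As a minimal sketch of the -fno-inline interaction above, assuming an
   illustrative helper

     static inline int add1 (int x) __attribute__ ((always_inline));
     static inline int add1 (int x) { return x + 1; }

   add1 is not rejected by this check even under -fno-inline, whereas the
   same function without the always_inline attribute would be rejected here
   (and, with -Winline, warned about) and marked DECL_UNINLINABLE.  */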
4086 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
4087 word size, take a possible memcpy call into account, and return the
4088 cost based on whether we optimize for size or speed according to SPEED_P. */
4090 static int
4091 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4093 HOST_WIDE_INT size;
4095 gcc_assert (!VOID_TYPE_P (type));
4097 if (TREE_CODE (type) == VECTOR_TYPE)
4099 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4100 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4101 int orig_mode_size
4102 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4103 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4104 return ((orig_mode_size + simd_mode_size - 1)
4105 / simd_mode_size);
4108 size = int_size_in_bytes (type);
4110 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4111 /* Cost of a memcpy call, 3 arguments and the call. */
4112 return 4;
4113 else
4114 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
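/* As a worked example with illustrative target parameters, assume
   MOVE_MAX_PIECES is 16 and MOVE_RATIO (speed_p) is 4: a 24-byte structure
   costs (24 + 16 - 1) / 16 = 2, while a 128-byte structure exceeds the
   16 * 4 = 64 byte threshold and is costed as a memcpy call, i.e. 4.  */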
4117 /* Returns cost of operation CODE, according to WEIGHTS */
4119 static int
4120 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4121 tree op1 ATTRIBUTE_UNUSED, tree op2)
4123 switch (code)
4125 /* These are "free" conversions, or their presumed cost
4126 is folded into other operations. */
4127 case RANGE_EXPR:
4128 CASE_CONVERT:
4129 case COMPLEX_EXPR:
4130 case PAREN_EXPR:
4131 case VIEW_CONVERT_EXPR:
4132 return 0;
4134 /* Assign cost of 1 to usual operations.
4135 ??? We may consider mapping RTL costs to this. */
4136 case COND_EXPR:
4137 case VEC_COND_EXPR:
4138 case VEC_PERM_EXPR:
4140 case PLUS_EXPR:
4141 case POINTER_PLUS_EXPR:
4142 case POINTER_DIFF_EXPR:
4143 case MINUS_EXPR:
4144 case MULT_EXPR:
4145 case MULT_HIGHPART_EXPR:
4147 case ADDR_SPACE_CONVERT_EXPR:
4148 case FIXED_CONVERT_EXPR:
4149 case FIX_TRUNC_EXPR:
4151 case NEGATE_EXPR:
4152 case FLOAT_EXPR:
4153 case MIN_EXPR:
4154 case MAX_EXPR:
4155 case ABS_EXPR:
4156 case ABSU_EXPR:
4158 case LSHIFT_EXPR:
4159 case RSHIFT_EXPR:
4160 case LROTATE_EXPR:
4161 case RROTATE_EXPR:
4163 case BIT_IOR_EXPR:
4164 case BIT_XOR_EXPR:
4165 case BIT_AND_EXPR:
4166 case BIT_NOT_EXPR:
4168 case TRUTH_ANDIF_EXPR:
4169 case TRUTH_ORIF_EXPR:
4170 case TRUTH_AND_EXPR:
4171 case TRUTH_OR_EXPR:
4172 case TRUTH_XOR_EXPR:
4173 case TRUTH_NOT_EXPR:
4175 case LT_EXPR:
4176 case LE_EXPR:
4177 case GT_EXPR:
4178 case GE_EXPR:
4179 case EQ_EXPR:
4180 case NE_EXPR:
4181 case ORDERED_EXPR:
4182 case UNORDERED_EXPR:
4184 case UNLT_EXPR:
4185 case UNLE_EXPR:
4186 case UNGT_EXPR:
4187 case UNGE_EXPR:
4188 case UNEQ_EXPR:
4189 case LTGT_EXPR:
4191 case CONJ_EXPR:
4193 case PREDECREMENT_EXPR:
4194 case PREINCREMENT_EXPR:
4195 case POSTDECREMENT_EXPR:
4196 case POSTINCREMENT_EXPR:
4198 case REALIGN_LOAD_EXPR:
4200 case WIDEN_SUM_EXPR:
4201 case WIDEN_MULT_EXPR:
4202 case DOT_PROD_EXPR:
4203 case SAD_EXPR:
4204 case WIDEN_MULT_PLUS_EXPR:
4205 case WIDEN_MULT_MINUS_EXPR:
4206 case WIDEN_LSHIFT_EXPR:
4208 case VEC_WIDEN_MULT_HI_EXPR:
4209 case VEC_WIDEN_MULT_LO_EXPR:
4210 case VEC_WIDEN_MULT_EVEN_EXPR:
4211 case VEC_WIDEN_MULT_ODD_EXPR:
4212 case VEC_UNPACK_HI_EXPR:
4213 case VEC_UNPACK_LO_EXPR:
4214 case VEC_UNPACK_FLOAT_HI_EXPR:
4215 case VEC_UNPACK_FLOAT_LO_EXPR:
4216 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4217 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4218 case VEC_PACK_TRUNC_EXPR:
4219 case VEC_PACK_SAT_EXPR:
4220 case VEC_PACK_FIX_TRUNC_EXPR:
4221 case VEC_PACK_FLOAT_EXPR:
4222 case VEC_WIDEN_LSHIFT_HI_EXPR:
4223 case VEC_WIDEN_LSHIFT_LO_EXPR:
4224 case VEC_DUPLICATE_EXPR:
4225 case VEC_SERIES_EXPR:
4227 return 1;
4229 /* A few special cases of expensive operations. This is useful
4230 to avoid inlining functions having too many of these. */
4231 case TRUNC_DIV_EXPR:
4232 case CEIL_DIV_EXPR:
4233 case FLOOR_DIV_EXPR:
4234 case ROUND_DIV_EXPR:
4235 case EXACT_DIV_EXPR:
4236 case TRUNC_MOD_EXPR:
4237 case CEIL_MOD_EXPR:
4238 case FLOOR_MOD_EXPR:
4239 case ROUND_MOD_EXPR:
4240 case RDIV_EXPR:
4241 if (TREE_CODE (op2) != INTEGER_CST)
4242 return weights->div_mod_cost;
4243 return 1;
4245 /* Bit-field insertion needs several shift and mask operations. */
4246 case BIT_INSERT_EXPR:
4247 return 3;
4249 default:
4250 /* We expect a copy assignment with no operator. */
4251 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4252 return 0;
4257 /* Estimate number of instructions that will be created by expanding
4258 the statements in the statement sequence STMTS.
4259 WEIGHTS contains weights attributed to various constructs. */
4261 int
4262 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4264 int cost;
4265 gimple_stmt_iterator gsi;
4267 cost = 0;
4268 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4269 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4271 return cost;
4275 /* Estimate number of instructions that will be created by expanding STMT.
4276 WEIGHTS contains weights attributed to various constructs. */
4278 int
4279 estimate_num_insns (gimple *stmt, eni_weights *weights)
4281 unsigned cost, i;
4282 enum gimple_code code = gimple_code (stmt);
4283 tree lhs;
4284 tree rhs;
4286 switch (code)
4288 case GIMPLE_ASSIGN:
4289 /* Try to estimate the cost of assignments. We have three cases to
4290 deal with:
4291 1) Simple assignments to registers;
4292 2) Stores to things that must live in memory. This includes
4293 "normal" stores to scalars, but also assignments of large
4294 structures, or constructors of big arrays;
4296 Let us look at the first two cases, assuming we have "a = b + C":
4297 <GIMPLE_ASSIGN <var_decl "a">
4298 <plus_expr <var_decl "b"> <constant C>>
4299 If "a" is a GIMPLE register, the assignment to it is free on almost
4300 any target, because "a" usually ends up in a real register. Hence
4301 the only cost of this expression comes from the PLUS_EXPR, and we
4302 can ignore the GIMPLE_ASSIGN.
4303 If "a" is not a GIMPLE register, the assignment to "a" will most
4304 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4305 of moving something into "a", which we compute using the function
4306 estimate_move_cost. */
4307 if (gimple_clobber_p (stmt))
4308 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4310 lhs = gimple_assign_lhs (stmt);
4311 rhs = gimple_assign_rhs1 (stmt);
4313 cost = 0;
4315 /* Account for the cost of moving to / from memory. */
4316 if (gimple_store_p (stmt))
4317 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4318 if (gimple_assign_load_p (stmt))
4319 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4321 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4322 gimple_assign_rhs1 (stmt),
4323 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4324 == GIMPLE_BINARY_RHS
4325 ? gimple_assign_rhs2 (stmt) : NULL);
4326 break;
4328 case GIMPLE_COND:
4329 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4330 gimple_op (stmt, 0),
4331 gimple_op (stmt, 1));
4332 break;
4334 case GIMPLE_SWITCH:
4336 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4337 /* Take into account cost of the switch + guess 2 conditional jumps for
4338 each case label.
4340 TODO: once the switch expansion logic is sufficiently separated, we can
4341 do a better job of estimating the cost of the switch. */
4342 if (weights->time_based)
4343 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4344 else
4345 cost = gimple_switch_num_labels (switch_stmt) * 2;
4347 break;
4349 case GIMPLE_CALL:
4351 tree decl;
4353 if (gimple_call_internal_p (stmt))
4354 return 0;
4355 else if ((decl = gimple_call_fndecl (stmt))
4356 && fndecl_built_in_p (decl))
4358 /* Do not special case builtins where we see the body.
4359 This just confuses the inliner. */
4360 struct cgraph_node *node;
4361 if (!(node = cgraph_node::get (decl))
4362 || node->definition)
4364 /* For builtins that are likely expanded to nothing or
4365 inlined, do not account for operand costs. */
4366 else if (is_simple_builtin (decl))
4367 return 0;
4368 else if (is_inexpensive_builtin (decl))
4369 return weights->target_builtin_call_cost;
4370 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4372 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4373 specialize the cheap expansion we do here.
4374 ??? This asks for a more general solution. */
4375 switch (DECL_FUNCTION_CODE (decl))
4377 case BUILT_IN_POW:
4378 case BUILT_IN_POWF:
4379 case BUILT_IN_POWL:
4380 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4381 && (real_equal
4382 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4383 &dconst2)))
4384 return estimate_operator_cost
4385 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4386 gimple_call_arg (stmt, 0));
4387 break;
4389 default:
4390 break;
4395 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4396 if (gimple_call_lhs (stmt))
4397 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4398 weights->time_based);
4399 for (i = 0; i < gimple_call_num_args (stmt); i++)
4401 tree arg = gimple_call_arg (stmt, i);
4402 cost += estimate_move_cost (TREE_TYPE (arg),
4403 weights->time_based);
4405 break;
4408 case GIMPLE_RETURN:
4409 return weights->return_cost;
4411 case GIMPLE_GOTO:
4412 case GIMPLE_LABEL:
4413 case GIMPLE_NOP:
4414 case GIMPLE_PHI:
4415 case GIMPLE_PREDICT:
4416 case GIMPLE_DEBUG:
4417 return 0;
4419 case GIMPLE_ASM:
4421 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4422 /* 1000 means infinity. This avoids overflows later
4423 with very long asm statements. */
4424 if (count > 1000)
4425 count = 1000;
4426 /* If this asm is asm inline, count anything as minimum size. */
4427 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4428 count = MIN (1, count);
4429 return MAX (1, count);
4432 case GIMPLE_RESX:
4433 /* This is either going to be an external function call with one
4434 argument, or two register copy statements plus a goto. */
4435 return 2;
4437 case GIMPLE_EH_DISPATCH:
4438 /* ??? This is going to turn into a switch statement. Ideally
4439 we'd have a look at the eh region and estimate the number of
4440 edges involved. */
4441 return 10;
4443 case GIMPLE_BIND:
4444 return estimate_num_insns_seq (
4445 gimple_bind_body (as_a <gbind *> (stmt)),
4446 weights);
4448 case GIMPLE_EH_FILTER:
4449 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4451 case GIMPLE_CATCH:
4452 return estimate_num_insns_seq (gimple_catch_handler (
4453 as_a <gcatch *> (stmt)),
4454 weights);
4456 case GIMPLE_TRY:
4457 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4458 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4460 /* OMP directives are generally very expensive. */
4462 case GIMPLE_OMP_RETURN:
4463 case GIMPLE_OMP_SECTIONS_SWITCH:
4464 case GIMPLE_OMP_ATOMIC_STORE:
4465 case GIMPLE_OMP_CONTINUE:
4466 /* ...except these, which are cheap. */
4467 return 0;
4469 case GIMPLE_OMP_ATOMIC_LOAD:
4470 return weights->omp_cost;
4472 case GIMPLE_OMP_FOR:
4473 return (weights->omp_cost
4474 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4475 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4477 case GIMPLE_OMP_PARALLEL:
4478 case GIMPLE_OMP_TASK:
4479 case GIMPLE_OMP_CRITICAL:
4480 case GIMPLE_OMP_MASTER:
4481 case GIMPLE_OMP_TASKGROUP:
4482 case GIMPLE_OMP_ORDERED:
4483 case GIMPLE_OMP_SCAN:
4484 case GIMPLE_OMP_SECTION:
4485 case GIMPLE_OMP_SECTIONS:
4486 case GIMPLE_OMP_SINGLE:
4487 case GIMPLE_OMP_TARGET:
4488 case GIMPLE_OMP_TEAMS:
4489 return (weights->omp_cost
4490 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4492 case GIMPLE_TRANSACTION:
4493 return (weights->tm_cost
4494 + estimate_num_insns_seq (gimple_transaction_body (
4495 as_a <gtransaction *> (stmt)),
4496 weights));
4498 default:
4499 gcc_unreachable ();
4502 return cost;
4505 /* Estimate number of instructions that will be created by expanding
4506 function FNDECL. WEIGHTS contains weights attributed to various
4507 constructs. */
4509 int
4510 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4512 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4513 gimple_stmt_iterator bsi;
4514 basic_block bb;
4515 int n = 0;
4517 gcc_assert (my_function && my_function->cfg);
4518 FOR_EACH_BB_FN (bb, my_function)
4520 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4521 n += estimate_num_insns (gsi_stmt (bsi), weights);
4524 return n;
4528 /* Initializes weights used by estimate_num_insns. */
4530 void
4531 init_inline_once (void)
4533 eni_size_weights.call_cost = 1;
4534 eni_size_weights.indirect_call_cost = 3;
4535 eni_size_weights.target_builtin_call_cost = 1;
4536 eni_size_weights.div_mod_cost = 1;
4537 eni_size_weights.omp_cost = 40;
4538 eni_size_weights.tm_cost = 10;
4539 eni_size_weights.time_based = false;
4540 eni_size_weights.return_cost = 1;
4542 /* Estimating time for call is difficult, since we have no idea what the
4543 called function does. In the current uses of eni_time_weights,
4544 underestimating the cost does less harm than overestimating it, so
4545 we choose a rather small value here. */
4546 eni_time_weights.call_cost = 10;
4547 eni_time_weights.indirect_call_cost = 15;
4548 eni_time_weights.target_builtin_call_cost = 1;
4549 eni_time_weights.div_mod_cost = 10;
4550 eni_time_weights.omp_cost = 40;
4551 eni_time_weights.tm_cost = 40;
4552 eni_time_weights.time_based = true;
4553 eni_time_weights.return_cost = 2;
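/* A typical use of these weight sets, e.g. when estimating a function's
   size and time, is (given a FUNCTION_DECL fndecl whose body has a CFG)

     int size = estimate_num_insns_fn (fndecl, &eni_size_weights);
     int time = estimate_num_insns_fn (fndecl, &eni_time_weights);

   so the same statement walk yields either a size estimate or a time
   estimate depending on which weights are passed.  */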
4557 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4559 static void
4560 prepend_lexical_block (tree current_block, tree new_block)
4562 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4563 BLOCK_SUBBLOCKS (current_block) = new_block;
4564 BLOCK_SUPERCONTEXT (new_block) = current_block;
4567 /* Add local variables from CALLEE to CALLER. */
4569 static inline void
4570 add_local_variables (struct function *callee, struct function *caller,
4571 copy_body_data *id)
4573 tree var;
4574 unsigned ix;
4576 FOR_EACH_LOCAL_DECL (callee, ix, var)
4577 if (!can_be_nonlocal (var, id))
4579 tree new_var = remap_decl (var, id);
4581 /* Remap debug-expressions. */
4582 if (VAR_P (new_var)
4583 && DECL_HAS_DEBUG_EXPR_P (var)
4584 && new_var != var)
4586 tree tem = DECL_DEBUG_EXPR (var);
4587 bool old_regimplify = id->regimplify;
4588 id->remapping_type_depth++;
4589 walk_tree (&tem, copy_tree_body_r, id, NULL);
4590 id->remapping_type_depth--;
4591 id->regimplify = old_regimplify;
4592 SET_DECL_DEBUG_EXPR (new_var, tem);
4593 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4595 add_local_decl (caller, new_var);
4599 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4600 have brought in or introduced any debug stmts for SRCVAR. */
4602 static inline void
4603 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4605 tree *remappedvarp = id->decl_map->get (srcvar);
4607 if (!remappedvarp)
4608 return;
4610 if (!VAR_P (*remappedvarp))
4611 return;
4613 if (*remappedvarp == id->retvar)
4614 return;
4616 tree tvar = target_for_debug_bind (*remappedvarp);
4617 if (!tvar)
4618 return;
4620 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4621 id->call_stmt);
4622 gimple_seq_add_stmt (bindings, stmt);
4625 /* For each inlined variable for which we may have debug bind stmts,
4626 add before GSI a final debug stmt resetting it, marking the end of
4627 its life, so that var-tracking knows it doesn't have to compute
4628 further locations for it. */
4630 static inline void
4631 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4633 tree var;
4634 unsigned ix;
4635 gimple_seq bindings = NULL;
4637 if (!gimple_in_ssa_p (id->src_cfun))
4638 return;
4640 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4641 return;
4643 for (var = DECL_ARGUMENTS (id->src_fn);
4644 var; var = DECL_CHAIN (var))
4645 reset_debug_binding (id, var, &bindings);
4647 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4648 reset_debug_binding (id, var, &bindings);
4650 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
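/* With -fvar-tracking-assignments this emits, for each such variable, a
   binding of roughly the form

     # DEBUG remapped_var => NULL

   just before the statements following the inlined body, so that
   var-tracking does not extend the variable's location ranges past the
   inline site.  */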
4653 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4655 static bool
4656 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4657 bitmap to_purge)
4659 tree use_retvar;
4660 tree fn;
4661 hash_map<tree, tree> *dst;
4662 hash_map<tree, tree> *st = NULL;
4663 tree return_slot;
4664 tree modify_dest;
4665 struct cgraph_edge *cg_edge;
4666 cgraph_inline_failed_t reason;
4667 basic_block return_block;
4668 edge e;
4669 gimple_stmt_iterator gsi, stmt_gsi;
4670 bool successfully_inlined = false;
4671 bool purge_dead_abnormal_edges;
4672 gcall *call_stmt;
4673 unsigned int prop_mask, src_properties;
4674 struct function *dst_cfun;
4675 tree simduid;
4676 use_operand_p use;
4677 gimple *simtenter_stmt = NULL;
4678 vec<tree> *simtvars_save;
4680 /* The gimplifier uses input_location in too many places, such as
4681 internal_get_tmp_var (). */
4682 location_t saved_location = input_location;
4683 input_location = gimple_location (stmt);
4685 /* From here on, we're only interested in CALL_EXPRs. */
4686 call_stmt = dyn_cast <gcall *> (stmt);
4687 if (!call_stmt)
4688 goto egress;
4690 cg_edge = id->dst_node->get_edge (stmt);
4691 gcc_checking_assert (cg_edge);
4692 /* First, see if we can figure out what function is being called.
4693 If we cannot, then there is no hope of inlining the function. */
4694 if (cg_edge->indirect_unknown_callee)
4695 goto egress;
4696 fn = cg_edge->callee->decl;
4697 gcc_checking_assert (fn);
4699 /* If FN is a declaration of a function in a nested scope that was
4700 globally declared inline, we don't set its DECL_INITIAL.
4701 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4702 C++ front-end uses it for cdtors to refer to their internal
4703 declarations, that are not real functions. Fortunately those
4704 don't have trees to be saved, so we can tell by checking their
4705 gimple_body. */
4706 if (!DECL_INITIAL (fn)
4707 && DECL_ABSTRACT_ORIGIN (fn)
4708 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4709 fn = DECL_ABSTRACT_ORIGIN (fn);
4711 /* Don't try to inline functions that are not well-suited to inlining. */
4712 if (cg_edge->inline_failed)
4714 reason = cg_edge->inline_failed;
4715 /* If this call was originally indirect, we do not want to emit any
4716 inlining related warnings or sorry messages because there are no
4717 guarantees regarding those. */
4718 if (cg_edge->indirect_inlining_edge)
4719 goto egress;
4721 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4722 /* For extern inline functions that get redefined we have always
4723 silently ignored the always_inline flag. Better behavior would
4724 be to be able to keep both bodies and use extern inline body
4725 for inlining, but we can't do that because frontends overwrite
4726 the body. */
4727 && !cg_edge->callee->redefined_extern_inline
4728 /* During early inline pass, report only when optimization is
4729 not turned on. */
4730 && (symtab->global_info_ready
4731 || !optimize
4732 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4733 /* PR 20090218-1_0.c. Body can be provided by another module. */
4734 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4736 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4737 cgraph_inline_failed_string (reason));
4738 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4739 inform (gimple_location (stmt), "called from here");
4740 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4741 inform (DECL_SOURCE_LOCATION (cfun->decl),
4742 "called from this function");
4744 else if (opt_for_fn (fn, warn_inline)
4745 && DECL_DECLARED_INLINE_P (fn)
4746 && !DECL_NO_INLINE_WARNING_P (fn)
4747 && !DECL_IN_SYSTEM_HEADER (fn)
4748 && reason != CIF_UNSPECIFIED
4749 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4750 /* Do not warn about not inlined recursive calls. */
4751 && !cg_edge->recursive_p ()
4752 /* Avoid warnings during early inline pass. */
4753 && symtab->global_info_ready)
4755 auto_diagnostic_group d;
4756 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4757 fn, _(cgraph_inline_failed_string (reason))))
4759 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4760 inform (gimple_location (stmt), "called from here");
4761 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4762 inform (DECL_SOURCE_LOCATION (cfun->decl),
4763 "called from this function");
4766 goto egress;
4768 id->src_node = cg_edge->callee;
4770 /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4771 and redirect to the function being thunked. */
4772 if (id->src_node->thunk.thunk_p)
4774 cgraph_edge *edge;
4775 tree virtual_offset = NULL;
4776 profile_count count = cg_edge->count;
4777 tree op;
4778 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4780 cgraph_edge::remove (cg_edge);
4781 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4782 gimple_uid (stmt),
4783 profile_count::one (),
4784 profile_count::one (),
4785 true);
4786 edge->count = count;
4787 if (id->src_node->thunk.virtual_offset_p)
4788 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4789 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4790 NULL);
4791 gsi_insert_before (&iter, gimple_build_assign (op,
4792 gimple_call_arg (stmt, 0)),
4793 GSI_NEW_STMT);
4794 gcc_assert (id->src_node->thunk.this_adjusting);
4795 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4796 virtual_offset, id->src_node->thunk.indirect_offset);
4798 gimple_call_set_arg (stmt, 0, op);
4799 gimple_call_set_fndecl (stmt, edge->callee->decl);
4800 update_stmt (stmt);
4801 id->src_node->remove ();
4802 expand_call_inline (bb, stmt, id, to_purge);
4803 maybe_remove_unused_call_args (cfun, stmt);
4804 return true;
4806 fn = cg_edge->callee->decl;
4807 cg_edge->callee->get_untransformed_body ();
4809 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4810 cg_edge->callee->verify ();
4812 /* We will be inlining this callee. */
4813 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4815 /* Update the caller's EH personality. */
4816 if (DECL_FUNCTION_PERSONALITY (fn))
4817 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4818 = DECL_FUNCTION_PERSONALITY (fn);
4820 /* Split the block before the GIMPLE_CALL. */
4821 stmt_gsi = gsi_for_stmt (stmt);
4822 gsi_prev (&stmt_gsi);
4823 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4824 bb = e->src;
4825 return_block = e->dest;
4826 remove_edge (e);
4828 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4829 been the source of abnormal edges. In this case, schedule
4830 the removal of dead abnormal edges. */
4831 gsi = gsi_start_bb (return_block);
4832 gsi_next (&gsi);
4833 purge_dead_abnormal_edges = gsi_end_p (gsi);
4835 stmt_gsi = gsi_start_bb (return_block);
4837 /* Build a block containing code to initialize the arguments, the
4838 actual inline expansion of the body, and a label for the return
4839 statements within the function to jump to. The type of the
4840 statement expression is the return type of the function call.
4841 ??? If the call does not have an associated block then we will
4842 remap all callee blocks to NULL, effectively dropping most of
4843 its debug information. This should only happen for calls to
4844 artificial decls inserted by the compiler itself. We need to
4845 either link the inlined blocks into the caller block tree or
4846 not refer to them in any way to not break GC for locations. */
4847 if (tree block = gimple_block (stmt))
4849 /* We do want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION
4850 so that inlined_function_outer_scope_p returns true on this BLOCK. */
4851 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4852 if (loc == UNKNOWN_LOCATION)
4853 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4854 if (loc == UNKNOWN_LOCATION)
4855 loc = BUILTINS_LOCATION;
4856 id->block = make_node (BLOCK);
4857 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4858 BLOCK_SOURCE_LOCATION (id->block) = loc;
4859 prepend_lexical_block (block, id->block);
4862 /* Local declarations will be replaced by their equivalents in this map. */
4863 st = id->decl_map;
4864 id->decl_map = new hash_map<tree, tree>;
4865 dst = id->debug_map;
4866 id->debug_map = NULL;
4867 if (flag_stack_reuse != SR_NONE)
4868 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4870 /* Record the function we are about to inline. */
4871 id->src_fn = fn;
4872 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4873 id->reset_location = DECL_IGNORED_P (fn);
4874 id->call_stmt = call_stmt;
4876 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4877 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4878 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4879 simtvars_save = id->dst_simt_vars;
4880 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4881 && (simduid = bb->loop_father->simduid) != NULL_TREE
4882 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4883 && single_imm_use (simduid, &use, &simtenter_stmt)
4884 && is_gimple_call (simtenter_stmt)
4885 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4886 vec_alloc (id->dst_simt_vars, 0);
4887 else
4888 id->dst_simt_vars = NULL;
4890 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4891 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4893 /* If the src function contains an IFN_VA_ARG, then so will the dst
4894 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4895 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4896 src_properties = id->src_cfun->curr_properties & prop_mask;
4897 if (src_properties != prop_mask)
4898 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4899 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4901 gcc_assert (!id->src_cfun->after_inlining);
4903 id->entry_bb = bb;
4904 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4906 gimple_stmt_iterator si = gsi_last_bb (bb);
4907 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4908 NOT_TAKEN),
4909 GSI_NEW_STMT);
4911 initialize_inlined_parameters (id, stmt, fn, bb);
4912 if (debug_nonbind_markers_p && debug_inline_points && id->block
4913 && inlined_function_outer_scope_p (id->block))
4915 gimple_stmt_iterator si = gsi_last_bb (bb);
4916 gsi_insert_after (&si, gimple_build_debug_inline_entry
4917 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4918 GSI_NEW_STMT);
4921 if (DECL_INITIAL (fn))
4923 if (gimple_block (stmt))
4925 tree *var;
4927 prepend_lexical_block (id->block,
4928 remap_blocks (DECL_INITIAL (fn), id));
4929 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4930 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4931 == NULL_TREE));
4932 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4933 otherwise, in DWARF, the DW_TAG_formal_parameter entries will not be
4934 children of DW_TAG_inlined_subroutine but of a DW_TAG_lexical_block
4935 under it. The parameters can then be evaluated in the debugger,
4936 but don't show up in backtraces. */
4937 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4938 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4940 tree v = *var;
4941 *var = TREE_CHAIN (v);
4942 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4943 BLOCK_VARS (id->block) = v;
4945 else
4946 var = &TREE_CHAIN (*var);
4948 else
4949 remap_blocks_to_null (DECL_INITIAL (fn), id);
4952 /* Return statements in the function body will be replaced by jumps
4953 to the RET_LABEL. */
4954 gcc_assert (DECL_INITIAL (fn));
4955 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4957 /* Find the LHS to which the result of this call is assigned. */
4958 return_slot = NULL;
4959 if (gimple_call_lhs (stmt))
4961 modify_dest = gimple_call_lhs (stmt);
4963 /* The function which we are inlining might not return a value,
4964 in which case we should issue a warning that the function
4965 does not return a value. In that case the optimizers will
4966 see that the variable to which the value is assigned was not
4967 initialized. We do not want to issue a warning about that
4968 uninitialized variable. */
4969 if (DECL_P (modify_dest))
4970 TREE_NO_WARNING (modify_dest) = 1;
4972 if (gimple_call_return_slot_opt_p (call_stmt))
4974 return_slot = modify_dest;
4975 modify_dest = NULL;
4978 else
4979 modify_dest = NULL;
4981 /* If we are inlining a call to the C++ operator new, we don't want
4982 to use type based alias analysis on the return value. Otherwise
4983 we may get confused if the compiler sees that the inlined new
4984 function returns a pointer which was just deleted. See bug
4985 33407. */
4986 if (DECL_IS_OPERATOR_NEW_P (fn))
4988 return_slot = NULL;
4989 modify_dest = NULL;
4992 /* Declare the return variable for the function. */
4993 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4995 /* Add local vars in this inlined callee to caller. */
4996 add_local_variables (id->src_cfun, cfun, id);
4998 if (id->src_node->clone.performed_splits)
5000 /* Any calls from the inlined function will be turned into calls from the
5001 function we inline into. We must preserve the notes about how to split
5002 parameters so that such calls can be redirected/updated properly. */
5003 unsigned len = vec_safe_length (id->src_node->clone.performed_splits);
5004 for (unsigned i = 0; i < len; i++)
5006 ipa_param_performed_split ps
5007 = (*id->src_node->clone.performed_splits)[i];
5008 ps.dummy_decl = remap_decl (ps.dummy_decl, id);
5009 vec_safe_push (id->dst_node->clone.performed_splits, ps);
5012 if (flag_checking)
5014 len = vec_safe_length (id->dst_node->clone.performed_splits);
5015 for (unsigned i = 0; i < len; i++)
5017 ipa_param_performed_split *ps1
5018 = &(*id->dst_node->clone.performed_splits)[i];
5019 for (unsigned j = i + 1; j < len; j++)
5021 ipa_param_performed_split *ps2
5022 = &(*id->dst_node->clone.performed_splits)[j];
5023 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
5024 || ps1->unit_offset != ps2->unit_offset);
5030 if (dump_enabled_p ())
5032 char buf[128];
5033 snprintf (buf, sizeof(buf), "%4.2f",
5034 cg_edge->sreal_frequency ().to_double ());
5035 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5036 call_stmt,
5037 "Inlining %C to %C with frequency %s\n",
5038 id->src_node, id->dst_node, buf);
5039 if (dump_file && (dump_flags & TDF_DETAILS))
5041 id->src_node->dump (dump_file);
5042 id->dst_node->dump (dump_file);
5046 /* This is it. Duplicate the callee body. Assume callee is
5047 pre-gimplified. Note that we must not alter the caller
5048 function in any way before this point, as this CALL_EXPR may be
5049 a self-referential call; if we're calling ourselves, we need to
5050 duplicate our body before altering anything. */
5051 copy_body (id, bb, return_block, NULL);
5053 reset_debug_bindings (id, stmt_gsi);
5055 if (flag_stack_reuse != SR_NONE)
5056 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5057 if (!TREE_THIS_VOLATILE (p))
5059 tree *varp = id->decl_map->get (p);
5060 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
5062 tree clobber = build_clobber (TREE_TYPE (*varp));
5063 gimple *clobber_stmt;
5064 clobber_stmt = gimple_build_assign (*varp, clobber);
5065 gimple_set_location (clobber_stmt, gimple_location (stmt));
5066 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5070 /* Reset the escaped solution. */
5071 if (cfun->gimple_df)
5072 pt_solution_reset (&cfun->gimple_df->escaped);
5074 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
5075 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5077 size_t nargs = gimple_call_num_args (simtenter_stmt);
5078 vec<tree> *vars = id->dst_simt_vars;
5079 auto_vec<tree> newargs (nargs + vars->length ());
5080 for (size_t i = 0; i < nargs; i++)
5081 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5082 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5084 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5085 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5087 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5088 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5089 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5090 gsi_replace (&gsi, g, false);
5092 vec_free (id->dst_simt_vars);
5093 id->dst_simt_vars = simtvars_save;
5095 /* Clean up. */
5096 if (id->debug_map)
5098 delete id->debug_map;
5099 id->debug_map = dst;
5101 delete id->decl_map;
5102 id->decl_map = st;
5105 /* Unlink the call's virtual operands before replacing it. */
5105 unlink_stmt_vdef (stmt);
5106 if (gimple_vdef (stmt)
5107 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5108 release_ssa_name (gimple_vdef (stmt));
5110 /* If the inlined function returns a result that we care about,
5111 substitute the GIMPLE_CALL with an assignment of the return
5112 variable to the LHS of the call. That is, if STMT was
5113 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
5114 if (use_retvar && gimple_call_lhs (stmt))
5116 gimple *old_stmt = stmt;
5117 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5118 gimple_set_location (stmt, gimple_location (old_stmt));
5119 gsi_replace (&stmt_gsi, stmt, false);
5120 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5121 /* Append a clobber for id->retvar if easily possible. */
5122 if (flag_stack_reuse != SR_NONE
5123 && id->retvar
5124 && VAR_P (id->retvar)
5125 && id->retvar != return_slot
5126 && id->retvar != modify_dest
5127 && !TREE_THIS_VOLATILE (id->retvar)
5128 && !is_gimple_reg (id->retvar)
5129 && !stmt_ends_bb_p (stmt))
5131 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5132 gimple *clobber_stmt;
5133 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5134 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5135 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5138 else
5140 /* Handle the case of inlining a function with no return
5141 statement, which causes the return value to become undefined. */
5142 if (gimple_call_lhs (stmt)
5143 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5145 tree name = gimple_call_lhs (stmt);
5146 tree var = SSA_NAME_VAR (name);
5147 tree def = var ? ssa_default_def (cfun, var) : NULL;
5149 if (def)
5151 /* If the variable is used undefined, make this name
5152 undefined via a move. */
5153 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5154 gsi_replace (&stmt_gsi, stmt, true);
5156 else
5158 if (!var)
5160 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5161 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5163 /* Otherwise make this variable undefined. */
5164 gsi_remove (&stmt_gsi, true);
5165 set_ssa_default_def (cfun, var, name);
5166 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5169 /* Replace with a clobber for id->retvar. */
5170 else if (flag_stack_reuse != SR_NONE
5171 && id->retvar
5172 && VAR_P (id->retvar)
5173 && id->retvar != return_slot
5174 && id->retvar != modify_dest
5175 && !TREE_THIS_VOLATILE (id->retvar)
5176 && !is_gimple_reg (id->retvar))
5178 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5179 gimple *clobber_stmt;
5180 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5181 gimple_set_location (clobber_stmt, gimple_location (stmt));
5182 gsi_replace (&stmt_gsi, clobber_stmt, false);
5183 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5185 else
5186 gsi_remove (&stmt_gsi, true);
5189 if (purge_dead_abnormal_edges)
5190 bitmap_set_bit (to_purge, return_block->index);
5192 /* If the value of the new expression is ignored, that's OK. We
5193 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5194 the equivalent inlined version either. */
5195 if (is_gimple_assign (stmt))
5197 gcc_assert (gimple_assign_single_p (stmt)
5198 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5199 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5202 id->add_clobbers_to_eh_landing_pads = 0;
5204 /* Output the inlining info for this abstract function, since it has been
5205 inlined. If we don't do this now, we can lose the information about the
5206 variables in the function when the blocks get blown away as soon as we
5207 remove the cgraph node. */
5208 if (gimple_block (stmt))
5209 (*debug_hooks->outlining_inline_function) (fn);
5211 /* Update callgraph if needed. */
5212 cg_edge->callee->remove ();
5214 id->block = NULL_TREE;
5215 id->retvar = NULL_TREE;
5216 successfully_inlined = true;
5218 egress:
5219 input_location = saved_location;
5220 return successfully_inlined;
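/* Conceptually, for a caller block of the form (names are illustrative)

     <bb N>: ...; x = foo (a); ...rest...

   a successful call to this function splits <bb N> before the call, copies
   foo's body in between with "a" bound to foo's parameter, redirects foo's
   returns to the join block, and finally replaces the call statement with
   an assignment from the return variable:

     <bb N>:  ...
     <copied blocks of foo>
     <bb M>:  x = retval.N; ...rest...

   (retval.N being the variable set up by declare_return_variable), or
   simply removes the call when its value is unused.  */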
5223 /* Expand call statements reachable from STMT_P.
5224 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5225 in a MODIFY_EXPR. */
5227 static bool
5228 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5229 bitmap to_purge)
5231 gimple_stmt_iterator gsi;
5232 bool inlined = false;
5234 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5236 gimple *stmt = gsi_stmt (gsi);
5237 gsi_prev (&gsi);
5239 if (is_gimple_call (stmt)
5240 && !gimple_call_internal_p (stmt))
5241 inlined |= expand_call_inline (bb, stmt, id, to_purge);
5244 return inlined;
5248 /* Walk all basic blocks created after FIRST and try to fold every statement
5249 in the STATEMENTS pointer set. */
5251 static void
5252 fold_marked_statements (int first, hash_set<gimple *> *statements)
5254 auto_bitmap to_purge;
5255 for (; first < last_basic_block_for_fn (cfun); first++)
5256 if (BASIC_BLOCK_FOR_FN (cfun, first))
5258 gimple_stmt_iterator gsi;
5260 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5261 !gsi_end_p (gsi);
5262 gsi_next (&gsi))
5263 if (statements->contains (gsi_stmt (gsi)))
5265 gimple *old_stmt = gsi_stmt (gsi);
5266 tree old_decl
5267 = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
5269 if (old_decl && fndecl_built_in_p (old_decl))
5271 /* Folding builtins can create multiple instructions,
5272 we need to look at all of them. */
5273 gimple_stmt_iterator i2 = gsi;
5274 gsi_prev (&i2);
5275 if (fold_stmt (&gsi))
5277 gimple *new_stmt;
5278 /* If a builtin at the end of a bb folded into nothing,
5279 the following loop won't work. */
5280 if (gsi_end_p (gsi))
5282 cgraph_update_edges_for_call_stmt (old_stmt,
5283 old_decl, NULL);
5284 break;
5286 if (gsi_end_p (i2))
5287 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5288 else
5289 gsi_next (&i2);
5290 while (1)
5292 new_stmt = gsi_stmt (i2);
5293 update_stmt (new_stmt);
5294 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5295 new_stmt);
5297 if (new_stmt == gsi_stmt (gsi))
5299 /* It is okay to check only for the very last
5300 of these statements. If it is a throwing
5301 statement nothing will change. If it isn't,
5302 this can remove EH edges. The only way that
5303 could be a problem is if some intermediate
5304 stmts throw, but not the last one. That would
5305 mean we'd have to split the block, which we
5306 can't do here and we'd lose anyway. And as
5307 builtins probably never throw, this all
5308 is moot anyway. */
5309 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5310 new_stmt))
5311 bitmap_set_bit (to_purge, first);
5312 break;
5314 gsi_next (&i2);
5318 else if (fold_stmt (&gsi))
5320 /* Re-read the statement from GSI as fold_stmt() may
5321 have changed it. */
5322 gimple *new_stmt = gsi_stmt (gsi);
5323 update_stmt (new_stmt);
5325 if (is_gimple_call (old_stmt)
5326 || is_gimple_call (new_stmt))
5327 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5328 new_stmt);
5330 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5331 bitmap_set_bit (to_purge, first);
5335 gimple_purge_all_dead_eh_edges (to_purge);
5338 /* Expand calls to inline functions in the body of FN. */
5340 unsigned int
5341 optimize_inline_calls (tree fn)
5343 copy_body_data id;
5344 basic_block bb;
5345 int last = n_basic_blocks_for_fn (cfun);
5346 bool inlined_p = false;
5348 /* Clear out ID. */
5349 memset (&id, 0, sizeof (id));
5351 id.src_node = id.dst_node = cgraph_node::get (fn);
5352 gcc_assert (id.dst_node->definition);
5353 id.dst_fn = fn;
5354 /* Or any functions that aren't finished yet. */
5355 if (current_function_decl)
5356 id.dst_fn = current_function_decl;
5358 id.copy_decl = copy_decl_maybe_to_var;
5359 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5360 id.transform_new_cfg = false;
5361 id.transform_return_to_modify = true;
5362 id.transform_parameter = true;
5363 id.transform_lang_insert_block = NULL;
5364 id.statements_to_fold = new hash_set<gimple *>;
5366 push_gimplify_context ();
5368 /* We make no attempts to keep dominance info up-to-date. */
5369 free_dominance_info (CDI_DOMINATORS);
5370 free_dominance_info (CDI_POST_DOMINATORS);
5372 /* Register specific gimple functions. */
5373 gimple_register_cfg_hooks ();
5375 /* Reach the trees by walking over the CFG, and note the
5376 enclosing basic-blocks in the call edges. */
5377 /* We walk the blocks going forward, because inlined function bodies
5378 will split id->current_basic_block, and the new blocks will
5379 follow it; we'll trudge through them, processing their CALL_EXPRs
5380 along the way. */
5381 auto_bitmap to_purge;
5382 FOR_EACH_BB_FN (bb, cfun)
5383 inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5385 pop_gimplify_context (NULL);
5387 if (flag_checking)
5389 struct cgraph_edge *e;
5391 id.dst_node->verify ();
5393 /* Double check that we inlined everything we are supposed to inline. */
5394 for (e = id.dst_node->callees; e; e = e->next_callee)
5395 gcc_assert (e->inline_failed);
5398 /* Fold queued statements. */
5399 update_max_bb_count ();
5400 fold_marked_statements (last, id.statements_to_fold);
5401 delete id.statements_to_fold;
5403 /* Finally purge EH and abnormal edges from the call stmts we inlined.
5404 We need to do this after fold_marked_statements since that may walk
5405 the SSA use-def chain. */
5406 unsigned i;
5407 bitmap_iterator bi;
5408 EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5410 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5411 if (bb)
5413 gimple_purge_dead_eh_edges (bb);
5414 gimple_purge_dead_abnormal_call_edges (bb);
5418 gcc_assert (!id.debug_stmts.exists ());
5420 /* If we didn't inline into the function there is nothing to do. */
5421 if (!inlined_p)
5422 return 0;
5424 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5425 number_blocks (fn);
5427 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5429 if (flag_checking)
5430 id.dst_node->verify ();
5432 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5433 not possible yet - the IPA passes might make various functions no longer
5434 throw, and they don't care to proactively update local EH info. This is
5435 done later in the fixup_cfg pass, which also executes the verification. */
5436 return (TODO_update_ssa
5437 | TODO_cleanup_cfg
5438 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5439 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5440 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5441 ? TODO_rebuild_frequencies : 0));
5444 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5446 tree
5447 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5449 enum tree_code code = TREE_CODE (*tp);
5450 enum tree_code_class cl = TREE_CODE_CLASS (code);
5452 /* We make copies of most nodes. */
5453 if (IS_EXPR_CODE_CLASS (cl)
5454 || code == TREE_LIST
5455 || code == TREE_VEC
5456 || code == TYPE_DECL
5457 || code == OMP_CLAUSE)
5459 /* Because the chain gets clobbered when we make a copy, we save it
5460 here. */
5461 tree chain = NULL_TREE, new_tree;
5463 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5464 chain = TREE_CHAIN (*tp);
5466 /* Copy the node. */
5467 new_tree = copy_node (*tp);
5469 *tp = new_tree;
5471 /* Now, restore the chain, if appropriate. That will cause
5472 walk_tree to walk into the chain as well. */
5473 if (code == PARM_DECL
5474 || code == TREE_LIST
5475 || code == OMP_CLAUSE)
5476 TREE_CHAIN (*tp) = chain;
5478 /* For now, we don't update BLOCKs when we make copies. So, we
5479 have to nullify all BIND_EXPRs. */
5480 if (TREE_CODE (*tp) == BIND_EXPR)
5481 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5483 else if (code == CONSTRUCTOR)
5485 /* CONSTRUCTOR nodes need special handling because
5486 we need to duplicate the vector of elements. */
5487 tree new_tree;
5489 new_tree = copy_node (*tp);
5490 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5491 *tp = new_tree;
5493 else if (code == STATEMENT_LIST)
5494 /* We used to just abort on STATEMENT_LIST, but we can run into them
5495 with statement-expressions (c++/40975). */
5496 copy_statement_list (tp);
5497 else if (TREE_CODE_CLASS (code) == tcc_type)
5498 *walk_subtrees = 0;
5499 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5500 *walk_subtrees = 0;
5501 else if (TREE_CODE_CLASS (code) == tcc_constant)
5502 *walk_subtrees = 0;
5503 return NULL_TREE;
5506 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5507 information indicating to what new SAVE_EXPR this one should be mapped,
5508 use that one. Otherwise, create a new node and enter it in ST so that
5509 later references to this SAVE_EXPR map to the same copy. */
5511 static void
5512 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5514 tree *n;
5515 tree t;
5517 /* See if we already encountered this SAVE_EXPR. */
5518 n = st->get (*tp);
5520 /* If we didn't already remap this SAVE_EXPR, do so now. */
5521 if (!n)
5523 t = copy_node (*tp);
5525 /* Remember this SAVE_EXPR. */
5526 st->put (*tp, t);
5527 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5528 st->put (t, t);
5530 else
5532 /* We've already walked into this SAVE_EXPR; don't do it again. */
5533 *walk_subtrees = 0;
5534 t = *n;
5537 /* Replace this SAVE_EXPR with the copy. */
5538 *tp = t;
5541 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5542 label, copies the declaration and enters it in the decl map of DATA (which
5543 is really a 'copy_body_data *'). */
5545 static tree
5546 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5547 bool *handled_ops_p ATTRIBUTE_UNUSED,
5548 struct walk_stmt_info *wi)
5550 copy_body_data *id = (copy_body_data *) wi->info;
5551 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5553 if (stmt)
5555 tree decl = gimple_label_label (stmt);
5557 /* Copy the decl and remember the copy. */
5558 insert_decl_map (id, decl, id->copy_decl (decl, id));
5561 return NULL_TREE;
5564 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5565 struct walk_stmt_info *wi);
5567 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5568 Using the decl map of the copy_body_data pointed to by DATA's info field,
5569 remaps all local declarations to appropriate replacements in gimple
5570 operands. */
5572 static tree
5573 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5575 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5576 copy_body_data *id = (copy_body_data *) wi->info;
5577 hash_map<tree, tree> *st = id->decl_map;
5578 tree *n;
5579 tree expr = *tp;
5581 /* For recursive invocations this is no longer the LHS itself. */
5582 bool is_lhs = wi->is_lhs;
5583 wi->is_lhs = false;
5585 if (TREE_CODE (expr) == SSA_NAME)
5587 *tp = remap_ssa_name (*tp, id);
5588 *walk_subtrees = 0;
5589 if (is_lhs)
5590 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5592 /* Only a local declaration (variable or label). */
5593 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5594 || TREE_CODE (expr) == LABEL_DECL)
5596 /* Lookup the declaration. */
5597 n = st->get (expr);
5599 /* If it's there, remap it. */
5600 if (n)
5601 *tp = *n;
5602 *walk_subtrees = 0;
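/* STATEMENT_LISTs, BIND_EXPRs and SAVE_EXPRs are lowered away by
   gimplification and must never appear among gimple operands. */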
5604 else if (TREE_CODE (expr) == STATEMENT_LIST
5605 || TREE_CODE (expr) == BIND_EXPR
5606 || TREE_CODE (expr) == SAVE_EXPR)
5607 gcc_unreachable ();
5608 else if (TREE_CODE (expr) == TARGET_EXPR)
5610 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5611 It's OK for this to happen if it was part of a subtree that
5612 isn't immediately expanded, such as operand 2 of another
5613 TARGET_EXPR. */
5614 if (!TREE_OPERAND (expr, 1))
5616 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5617 TREE_OPERAND (expr, 3) = NULL_TREE;
5620 else if (TREE_CODE (expr) == OMP_CLAUSE)
5622 /* Before the omplower pass completes, some OMP clauses can contain
5623 sequences that are neither copied by gimple_seq_copy nor walked by
5624 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5625 in those situations, we have to copy and process them explicitly. */
5627 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5629 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5630 seq = duplicate_remap_omp_clause_seq (seq, wi);
5631 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5633 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5635 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5636 seq = duplicate_remap_omp_clause_seq (seq, wi);
5637 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5639 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5641 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5642 seq = duplicate_remap_omp_clause_seq (seq, wi);
5643 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5644 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5645 seq = duplicate_remap_omp_clause_seq (seq, wi);
5646 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5650 /* Keep iterating. */
5651 return NULL_TREE;
5655 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5656 Using the decl map of the copy_body_data pointed to by WI's info field,
5657 remaps all local declarations to appropriate replacements in gimple
5658 statements. */
5660 static tree
5661 replace_locals_stmt (gimple_stmt_iterator *gsip,
5662 bool *handled_ops_p ATTRIBUTE_UNUSED,
5663 struct walk_stmt_info *wi)
5665 copy_body_data *id = (copy_body_data *) wi->info;
5666 gimple *gs = gsi_stmt (*gsip);
5668 if (gbind *stmt = dyn_cast <gbind *> (gs))
5670 tree block = gimple_bind_block (stmt);
5672 if (block)
5674 remap_block (&block, id);
5675 gimple_bind_set_block (stmt, block);
5678 /* This will remap a lot of the same decls again, but this should be
5679 harmless. */
5680 if (gimple_bind_vars (stmt))
5682 tree old_var, decls = gimple_bind_vars (stmt);
5684 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5685 if (!can_be_nonlocal (old_var, id)
5686 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5687 remap_decl (old_var, id);
5689 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5690 id->prevent_decl_creation_for_types = true;
5691 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5692 id->prevent_decl_creation_for_types = false;
5696 /* Keep iterating. */
5697 return NULL_TREE;
5700 /* Create a copy of SEQ and remap all decls in it. */
5702 static gimple_seq
5703 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5705 if (!seq)
5706 return NULL;
5708 /* Any labels in OMP sequences can only be referred to from within the
5709 sequence itself, so both the label marking and the remapping can be done here. */
5710 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5711 gimple_seq copy = gimple_seq_copy (seq);
5712 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5713 return copy;
5716 /* Copies everything in SEQ and replaces variables and labels local to
5717 current_function_decl. */
5719 gimple_seq
5720 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5722 copy_body_data id;
5723 struct walk_stmt_info wi;
5724 gimple_seq copy;
5726 /* There's nothing to do for NULL_TREE. */
5727 if (seq == NULL)
5728 return seq;
5730 /* Set up ID. */
5731 memset (&id, 0, sizeof (id));
5732 id.src_fn = current_function_decl;
5733 id.dst_fn = current_function_decl;
5734 id.src_cfun = cfun;
5735 id.decl_map = new hash_map<tree, tree>;
5736 id.debug_map = NULL;
5738 id.copy_decl = copy_decl_no_change;
5739 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5740 id.transform_new_cfg = false;
5741 id.transform_return_to_modify = false;
5742 id.transform_parameter = false;
5743 id.transform_lang_insert_block = NULL;
5745 /* Walk the tree once to find local labels. */
5746 memset (&wi, 0, sizeof (wi));
5747 hash_set<tree> visited;
5748 wi.info = &id;
5749 wi.pset = &visited;
5750 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5752 copy = gimple_seq_copy (seq);
5754 /* Walk the copy, remapping decls. */
5755 memset (&wi, 0, sizeof (wi));
5756 wi.info = &id;
5757 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5759 /* Clean up. */
5760 delete id.decl_map;
5761 if (id.debug_map)
5762 delete id.debug_map;
5763 if (id.dependence_map)
5765 delete id.dependence_map;
5766 id.dependence_map = NULL;
5769 return copy;
5773 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5775 static tree
5776 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5778 if (*tp == data)
5779 return (tree) data;
5780 else
5781 return NULL;
5784 DEBUG_FUNCTION bool
5785 debug_find_tree (tree top, tree search)
5787 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5791 /* Declare the variables created by the inliner. Add all the variables in
5792 VARS to BLOCK. */
5794 static void
5795 declare_inline_vars (tree block, tree vars)
5797 tree t;
5798 for (t = vars; t; t = DECL_CHAIN (t))
5800 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5801 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5802 add_local_decl (cfun, t);
5805 if (block)
5806 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5809 /* Finish the copy of DECL into COPY. The original DECL comes from
5810 ID->src_fn and the copy will live in ID->dst_fn; set up the debug info,
5811 context and other common fields of COPY accordingly. */
5813 tree
5814 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5816 /* Don't generate debug information for the copy if we wouldn't have
5817 generated it for the original either. */
5818 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5819 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5821 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5822 declaration inspired this copy. */
5823 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5825 /* The new variable/label has no RTL, yet. */
5826 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5827 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5828 SET_DECL_RTL (copy, 0);
5829 /* For vector typed decls make sure to update DECL_MODE according
5830 to the new function context. */
5831 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5832 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5834 /* These args would always appear unused, if not for this. */
5835 TREE_USED (copy) = 1;
5837 /* Set the context for the new declaration. */
5838 if (!DECL_CONTEXT (decl))
5839 /* Globals stay global. */
5841 else if (DECL_CONTEXT (decl) != id->src_fn)
5842 /* Things that weren't in the scope of the function we're inlining
5843 from aren't in the scope we're inlining to, either. */
5845 else if (TREE_STATIC (decl))
5846 /* Function-scoped static variables should stay in the original
5847 function. */
5849 else
5851 /* Ordinary automatic local variables are now in the scope of the
5852 new function. */
5853 DECL_CONTEXT (copy) = id->dst_fn;
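/* Non-register locals copied into an OpenMP SIMT region are tagged with
   the "omp simt private" attribute and recorded in dst_simt_vars so the
   SIMT-specific lowering can deal with them later. */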
5854 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5856 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5857 DECL_ATTRIBUTES (copy)
5858 = tree_cons (get_identifier ("omp simt private"), NULL,
5859 DECL_ATTRIBUTES (copy));
5860 id->dst_simt_vars->safe_push (copy);
5864 return copy;
5867 /* Create a new VAR_DECL that is identical in all respects to DECL, except
5868 that DECL itself may be a PARM_DECL or a RESULT_DECL. The original
5869 DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
5871 tree
5872 copy_decl_to_var (tree decl, copy_body_data *id)
5874 tree copy, type;
5876 gcc_assert (TREE_CODE (decl) == PARM_DECL
5877 || TREE_CODE (decl) == RESULT_DECL);
5879 type = TREE_TYPE (decl);
5881 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5882 VAR_DECL, DECL_NAME (decl), type);
5883 if (DECL_PT_UID_SET_P (decl))
5884 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5885 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5886 TREE_READONLY (copy) = TREE_READONLY (decl);
5887 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5888 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5890 return copy_decl_for_dup_finish (id, decl, copy);
5893 /* Like copy_decl_to_var, but create a return slot object instead of a
5894 pointer variable for return by invisible reference. */
5896 static tree
5897 copy_result_decl_to_var (tree decl, copy_body_data *id)
5899 tree copy, type;
5901 gcc_assert (TREE_CODE (decl) == PARM_DECL
5902 || TREE_CODE (decl) == RESULT_DECL);
5904 type = TREE_TYPE (decl);
5905 if (DECL_BY_REFERENCE (decl))
5906 type = TREE_TYPE (type);
5908 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5909 VAR_DECL, DECL_NAME (decl), type);
5910 if (DECL_PT_UID_SET_P (decl))
5911 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5912 TREE_READONLY (copy) = TREE_READONLY (decl);
5913 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5914 if (!DECL_BY_REFERENCE (decl))
5916 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5917 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5920 return copy_decl_for_dup_finish (id, decl, copy);
5923 tree
5924 copy_decl_no_change (tree decl, copy_body_data *id)
5926 tree copy;
5928 copy = copy_node (decl);
5930 /* The COPY is not abstract; it will be generated in DST_FN. */
5931 DECL_ABSTRACT_P (copy) = false;
5932 lang_hooks.dup_lang_specific_decl (copy);
5934 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5935 been taken; it's for internal bookkeeping in expand_goto_internal. */
5936 if (TREE_CODE (copy) == LABEL_DECL)
5938 TREE_ADDRESSABLE (copy) = 0;
5939 LABEL_DECL_UID (copy) = -1;
5942 return copy_decl_for_dup_finish (id, decl, copy);
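/* Copy DECL for use in ID->dst_fn: PARM_DECLs and RESULT_DECLs become
   VAR_DECLs via copy_decl_to_var, everything else is copied unchanged.
   Used as the copy_decl callback when inlining. */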
5945 static tree
5946 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5948 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5949 return copy_decl_to_var (decl, id);
5950 else
5951 return copy_decl_no_change (decl, id);
5954 /* Return a copy of the function's argument tree without any modifications. */
5956 static tree
5957 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
5959 tree arg, *parg;
5960 tree new_parm = NULL;
5962 parg = &new_parm;
5963 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
5965 tree new_tree = remap_decl (arg, id);
5966 if (TREE_CODE (new_tree) != PARM_DECL)
5967 new_tree = id->copy_decl (arg, id);
5968 lang_hooks.dup_lang_specific_decl (new_tree);
5969 *parg = new_tree;
5970 parg = &DECL_CHAIN (new_tree);
5972 return new_parm;
5975 /* Return a copy of the function's static chain. */
5976 static tree
5977 copy_static_chain (tree static_chain, copy_body_data * id)
5979 tree *chain_copy, *pvar;
5981 chain_copy = &static_chain;
5982 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5984 tree new_tree = remap_decl (*pvar, id);
5985 lang_hooks.dup_lang_specific_decl (new_tree);
5986 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5987 *pvar = new_tree;
5989 return static_chain;
5992 /* Return true if the function is allowed to be versioned.
5993 This is a guard for the versioning functionality. */
5995 bool
5996 tree_versionable_function_p (tree fndecl)
5998 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5999 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
6002 /* Update clone info after duplication. */
6004 static void
6005 update_clone_info (copy_body_data * id)
6007 vec<ipa_param_performed_split, va_gc> *cur_performed_splits
6008 = id->dst_node->clone.performed_splits;
6009 if (cur_performed_splits)
6011 unsigned len = cur_performed_splits->length ();
6012 for (unsigned i = 0; i < len; i++)
6014 ipa_param_performed_split *ps = &(*cur_performed_splits)[i];
6015 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6019 struct cgraph_node *node;
6020 if (!id->dst_node->clones)
6021 return;
6022 for (node = id->dst_node->clones; node != id->dst_node;)
6024 /* First update replace maps to match the new body. */
6025 if (node->clone.tree_map)
6027 unsigned int i;
6028 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
6030 struct ipa_replace_map *replace_info;
6031 replace_info = (*node->clone.tree_map)[i];
6032 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6035 if (node->clone.performed_splits)
6037 unsigned len = vec_safe_length (node->clone.performed_splits);
6038 for (unsigned i = 0; i < len; i++)
6040 ipa_param_performed_split *ps
6041 = &(*node->clone.performed_splits)[i];
6042 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6045 if (unsigned len = vec_safe_length (cur_performed_splits))
6047 /* We do not want to add the current performed splits when we are saving
6048 a copy of the function body for later use during inlining; that would just
6049 duplicate all entries. So check whether anything referring to the
6050 first dummy_decl is already present. */
6051 unsigned dst_len = vec_safe_length (node->clone.performed_splits);
6052 ipa_param_performed_split *first = &(*cur_performed_splits)[0];
6053 for (unsigned i = 0; i < dst_len; i++)
6054 if ((*node->clone.performed_splits)[i].dummy_decl
6055 == first->dummy_decl)
6057 len = 0;
6058 break;
6061 for (unsigned i = 0; i < len; i++)
6062 vec_safe_push (node->clone.performed_splits,
6063 (*cur_performed_splits)[i]);
6064 if (flag_checking)
6066 for (unsigned i = 0; i < dst_len; i++)
6068 ipa_param_performed_split *ps1
6069 = &(*node->clone.performed_splits)[i];
6070 for (unsigned j = i + 1; j < dst_len; j++)
6072 ipa_param_performed_split *ps2
6073 = &(*node->clone.performed_splits)[j];
6074 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
6075 || ps1->unit_offset != ps2->unit_offset);
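/* Advance to the next clone in a preorder walk of the clone tree rooted at
   id->dst_node: descend into this node's clones first, otherwise step to the
   next sibling, otherwise climb back up until an ancestor with an unvisited
   sibling is found (or we are back at the root). */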
6081 if (node->clones)
6082 node = node->clones;
6083 else if (node->next_sibling_clone)
6084 node = node->next_sibling_clone;
6085 else
6087 while (node != id->dst_node && !node->next_sibling_clone)
6088 node = node->clone_of;
6089 if (node != id->dst_node)
6090 node = node->next_sibling_clone;
6095 /* Create a copy of a function's tree.
6096 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6097 of the original function and the new copied function
6098 respectively. In case we want to replace a DECL
6099 tree with another tree while duplicating the function's
6100 body, TREE_MAP represents the mapping between these
6101 trees. If UPDATE_CLONES is set, the call_stmt fields
6102 of edges of clones of the function will be updated.
6104 If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype (i.e.
6105 the function parameters and return value) should be modified.
6106 If non-NULL, BLOCKS_TO_COPY determines what basic blocks to copy.
6107 If non-NULL, NEW_ENTRY determines the new entry BB of the clone. */
6109 void
6110 tree_function_versioning (tree old_decl, tree new_decl,
6111 vec<ipa_replace_map *, va_gc> *tree_map,
6112 ipa_param_adjustments *param_adjustments,
6113 bool update_clones, bitmap blocks_to_copy,
6114 basic_block new_entry)
6116 struct cgraph_node *old_version_node;
6117 struct cgraph_node *new_version_node;
6118 copy_body_data id;
6119 tree p;
6120 unsigned i;
6121 struct ipa_replace_map *replace_info;
6122 basic_block old_entry_block, bb;
6123 auto_vec<gimple *, 10> init_stmts;
6124 tree vars = NULL_TREE;
6126 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6127 && TREE_CODE (new_decl) == FUNCTION_DECL);
6128 DECL_POSSIBLY_INLINED (old_decl) = 1;
6130 old_version_node = cgraph_node::get (old_decl);
6131 gcc_checking_assert (old_version_node);
6132 new_version_node = cgraph_node::get (new_decl);
6133 gcc_checking_assert (new_version_node);
6135 /* Copy over debug args. */
6136 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6138 vec<tree, va_gc> **new_debug_args, **old_debug_args;
6139 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6140 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6141 old_debug_args = decl_debug_args_lookup (old_decl);
6142 if (old_debug_args)
6144 new_debug_args = decl_debug_args_insert (new_decl);
6145 *new_debug_args = vec_safe_copy (*old_debug_args);
6149 /* Output the inlining info for this abstract function, since it has been
6150 inlined. If we don't do this now, we can lose the information about the
6151 variables in the function when the blocks get blown away as soon as we
6152 remove the cgraph node. */
6153 (*debug_hooks->outlining_inline_function) (old_decl);
6155 DECL_ARTIFICIAL (new_decl) = 1;
6156 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6157 if (DECL_ORIGIN (old_decl) == old_decl)
6158 old_version_node->used_as_abstract_origin = true;
6159 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6161 /* Prepare the data structures for the tree copy. */
6162 memset (&id, 0, sizeof (id));
6164 /* Prepare to record statements that need folding after the copy. */
6165 id.statements_to_fold = new hash_set<gimple *>;
6167 id.decl_map = new hash_map<tree, tree>;
6168 id.debug_map = NULL;
6169 id.src_fn = old_decl;
6170 id.dst_fn = new_decl;
6171 id.src_node = old_version_node;
6172 id.dst_node = new_version_node;
6173 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6174 id.blocks_to_copy = blocks_to_copy;
6176 id.copy_decl = copy_decl_no_change;
6177 id.transform_call_graph_edges
6178 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6179 id.transform_new_cfg = true;
6180 id.transform_return_to_modify = false;
6181 id.transform_parameter = false;
6182 id.transform_lang_insert_block = NULL;
6184 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
6185 (DECL_STRUCT_FUNCTION (old_decl));
6186 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6187 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6188 initialize_cfun (new_decl, old_decl,
6189 new_entry ? new_entry->count : old_entry_block->count);
6190 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6191 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6192 = id.src_cfun->gimple_df->ipa_pta;
6194 /* Copy the function's static chain. */
6195 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6196 if (p)
6197 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6198 = copy_static_chain (p, &id);
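/* If the source function's parameters were already adjusted by an earlier
   round of cloning, the parm_num fields in TREE_MAP refer to the original
   signature; compute where each original parameter now lives. */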
6200 auto_vec<int, 16> new_param_indices;
6201 ipa_param_adjustments *old_param_adjustments
6202 = old_version_node->clone.param_adjustments;
6203 if (old_param_adjustments)
6204 old_param_adjustments->get_updated_indices (&new_param_indices);
6206 /* If there's a tree_map, prepare for substitution. */
6207 if (tree_map)
6208 for (i = 0; i < tree_map->length (); i++)
6210 gimple *init;
6211 replace_info = (*tree_map)[i];
6213 int p = replace_info->parm_num;
6214 if (old_param_adjustments)
6215 p = new_param_indices[p];
6217 tree parm;
6218 tree req_type, new_type;
6220 for (parm = DECL_ARGUMENTS (old_decl); p;
6221 parm = DECL_CHAIN (parm))
6222 p--;
6223 tree old_tree = parm;
6224 req_type = TREE_TYPE (parm);
6225 new_type = TREE_TYPE (replace_info->new_tree);
6226 if (!useless_type_conversion_p (req_type, new_type))
6228 if (fold_convertible_p (req_type, replace_info->new_tree))
6229 replace_info->new_tree
6230 = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
6231 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
6232 replace_info->new_tree
6233 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
6234 replace_info->new_tree);
6235 else
6237 if (dump_file)
6239 fprintf (dump_file, " const ");
6240 print_generic_expr (dump_file,
6241 replace_info->new_tree);
6242 fprintf (dump_file,
6243 " can't be converted to param ");
6244 print_generic_expr (dump_file, parm);
6245 fprintf (dump_file, "\n");
6247 old_tree = NULL;
6251 if (old_tree)
6253 init = setup_one_parameter (&id, old_tree, replace_info->new_tree,
6254 id.src_fn, NULL, &vars);
6255 if (init)
6256 init_stmts.safe_push (init);
6260 ipa_param_body_adjustments *param_body_adjs = NULL;
6261 if (param_adjustments)
6263 param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6264 new_decl, old_decl,
6265 &id, &vars, tree_map);
6266 id.param_body_adjs = param_body_adjs;
6267 DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6269 else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6270 DECL_ARGUMENTS (new_decl)
6271 = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6273 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6274 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6276 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6278 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6279 /* Add local vars. */
6280 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6282 if (DECL_RESULT (old_decl) == NULL_TREE)
6284 else if (param_adjustments && param_adjustments->m_skip_return
6285 && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6287 tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6288 &id);
6289 declare_inline_vars (NULL, resdecl_repl);
6290 insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6292 DECL_RESULT (new_decl)
6293 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6294 RESULT_DECL, NULL_TREE, void_type_node);
6295 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6296 DECL_IS_MALLOC (new_decl) = false;
6297 cfun->returns_struct = 0;
6298 cfun->returns_pcc_struct = 0;
6300 else
6302 tree old_name;
6303 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6304 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6305 if (gimple_in_ssa_p (id.src_cfun)
6306 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6307 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6309 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6310 insert_decl_map (&id, old_name, new_name);
6311 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6312 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6316 /* Set up the destination function's loop tree. */
6317 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6319 cfun->curr_properties &= ~PROP_loops;
6320 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6321 cfun->curr_properties |= PROP_loops;
6324 /* Copy the Function's body. */
6325 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6326 new_entry);
6328 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6329 number_blocks (new_decl);
6331 /* We want to create the BB unconditionally, so that the addition of
6332 debug stmts doesn't affect BB count, which may in the end cause
6333 codegen differences. */
6334 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6335 while (init_stmts.length ())
6336 insert_init_stmt (&id, bb, init_stmts.pop ());
6337 update_clone_info (&id);
6339 /* Remap the nonlocal_goto_save_area, if any. */
6340 if (cfun->nonlocal_goto_save_area)
6342 struct walk_stmt_info wi;
6344 memset (&wi, 0, sizeof (wi));
6345 wi.info = &id;
6346 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6349 /* Clean up. */
6350 delete id.decl_map;
6351 if (id.debug_map)
6352 delete id.debug_map;
6353 free_dominance_info (CDI_DOMINATORS);
6354 free_dominance_info (CDI_POST_DOMINATORS);
6356 update_max_bb_count ();
6357 fold_marked_statements (0, id.statements_to_fold);
6358 delete id.statements_to_fold;
6359 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6360 if (id.dst_node->definition)
6361 cgraph_edge::rebuild_references ();
6362 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6364 calculate_dominance_info (CDI_DOMINATORS);
6365 fix_loop_structure (NULL);
6367 update_ssa (TODO_update_ssa);
6369 /* After partial cloning we need to rescale frequencies, so they are
6370 within proper range in the cloned function. */
6371 if (new_entry)
6373 struct cgraph_edge *e;
6374 rebuild_frequencies ();
6376 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6377 for (e = new_version_node->callees; e; e = e->next_callee)
6379 basic_block bb = gimple_bb (e->call_stmt);
6380 e->count = bb->count;
6382 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6384 basic_block bb = gimple_bb (e->call_stmt);
6385 e->count = bb->count;
6389 if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6391 vec<tree, va_gc> **debug_args = NULL;
6392 unsigned int len = 0;
6393 unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6395 for (i = 0; i < reset_len; i++)
6397 tree parm = param_body_adjs->m_reset_debug_decls[i];
6398 gcc_assert (is_gimple_reg (parm));
6399 tree ddecl;
6401 if (debug_args == NULL)
6403 debug_args = decl_debug_args_insert (new_decl);
6404 len = vec_safe_length (*debug_args);
6406 ddecl = make_node (DEBUG_EXPR_DECL);
6407 DECL_ARTIFICIAL (ddecl) = 1;
6408 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6409 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6410 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6411 vec_safe_push (*debug_args, ddecl);
6413 if (debug_args != NULL)
6415 /* On the callee side, add
6416 DEBUG D#Y s=> parm
6417 DEBUG var => D#Y
6418 stmts to the first bb, where var is a VAR_DECL created for the
6419 optimized-away parameter in the DECL_INITIAL block. This hints
6420 in the debug info that var (whose DECL_ORIGIN is the parm
6421 PARM_DECL) is optimized away, but its value can be looked up at the
6422 call site as the value of D#X there. */
6423 tree vexpr;
6424 gimple_stmt_iterator cgsi
6425 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6426 gimple *def_temp;
6427 tree var = vars;
6428 i = vec_safe_length (*debug_args);
6431 i -= 2;
6432 while (var != NULL_TREE
6433 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6434 var = TREE_CHAIN (var);
6435 if (var == NULL_TREE)
6436 break;
6437 vexpr = make_node (DEBUG_EXPR_DECL);
6438 tree parm = (**debug_args)[i];
6439 DECL_ARTIFICIAL (vexpr) = 1;
6440 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6441 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6442 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6443 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6444 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6445 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6447 while (i > len);
6450 delete param_body_adjs;
6451 free_dominance_info (CDI_DOMINATORS);
6452 free_dominance_info (CDI_POST_DOMINATORS);
6454 gcc_assert (!id.debug_stmts.exists ());
6455 pop_cfun ();
6456 return;
6459 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6460 the callee and return the inlined body on success. */
6462 tree
6463 maybe_inline_call_in_expr (tree exp)
6465 tree fn = get_callee_fndecl (exp);
6467 /* We can only try to inline "const" functions. */
6468 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6470 call_expr_arg_iterator iter;
6471 copy_body_data id;
6472 tree param, arg, t;
6473 hash_map<tree, tree> decl_map;
6475 /* Remap the parameters. */
6476 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6477 param;
6478 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6479 decl_map.put (param, arg);
6481 memset (&id, 0, sizeof (id));
6482 id.src_fn = fn;
6483 id.dst_fn = current_function_decl;
6484 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6485 id.decl_map = &decl_map;
6487 id.copy_decl = copy_decl_no_change;
6488 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6489 id.transform_new_cfg = false;
6490 id.transform_return_to_modify = true;
6491 id.transform_parameter = true;
6492 id.transform_lang_insert_block = NULL;
6494 /* Make sure not to unshare trees behind the front-end's back
6495 since front-end specific mechanisms may rely on sharing. */
6496 id.regimplify = false;
6497 id.do_not_unshare = true;
6499 /* We're not inside any EH region. */
6500 id.eh_lp_nr = 0;
6502 t = copy_tree_body (&id);
6504 /* We can only return something suitable for use in a GENERIC
6505 expression tree. */
6506 if (TREE_CODE (t) == MODIFY_EXPR)
6507 return TREE_OPERAND (t, 1);
6510 return NULL_TREE;
6513 /* Duplicate a type, fields and all. */
6515 tree
6516 build_duplicate_type (tree type)
6518 struct copy_body_data id;
6520 memset (&id, 0, sizeof (id));
6521 id.src_fn = current_function_decl;
6522 id.dst_fn = current_function_decl;
6523 id.src_cfun = cfun;
6524 id.decl_map = new hash_map<tree, tree>;
6525 id.debug_map = NULL;
6526 id.copy_decl = copy_decl_no_change;
6528 type = remap_type_1 (type, &id);
6530 delete id.decl_map;
6531 if (id.debug_map)
6532 delete id.debug_map;
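/* Make the duplicate its own canonical type so that it is treated as
   distinct from the original in canonical type comparisons. */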
6534 TYPE_CANONICAL (type) = type;
6536 return type;
6539 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6540 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6541 evaluation. */
6543 tree
6544 copy_fn (tree fn, tree& parms, tree& result)
6546 copy_body_data id;
6547 tree param;
6548 hash_map<tree, tree> decl_map;
6550 tree *p = &parms;
6551 *p = NULL_TREE;
6553 memset (&id, 0, sizeof (id));
6554 id.src_fn = fn;
6555 id.dst_fn = current_function_decl;
6556 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6557 id.decl_map = &decl_map;
6559 id.copy_decl = copy_decl_no_change;
6560 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6561 id.transform_new_cfg = false;
6562 id.transform_return_to_modify = false;
6563 id.transform_parameter = true;
6564 id.transform_lang_insert_block = NULL;
6566 /* Make sure not to unshare trees behind the front-end's back
6567 since front-end specific mechanisms may rely on sharing. */
6568 id.regimplify = false;
6569 id.do_not_unshare = true;
6570 id.do_not_fold = true;
6572 /* We're not inside any EH region. */
6573 id.eh_lp_nr = 0;
6575 /* Remap the parameters and result and return them to the caller. */
6576 for (param = DECL_ARGUMENTS (fn);
6577 param;
6578 param = DECL_CHAIN (param))
6580 *p = remap_decl (param, &id);
6581 p = &DECL_CHAIN (*p);
6584 if (DECL_RESULT (fn))
6585 result = remap_decl (DECL_RESULT (fn), &id);
6586 else
6587 result = NULL_TREE;
6589 return copy_tree_body (&id);