gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63 #include "tree-cfgcleanup.h"
64 #include "tree-ssa-live.h"
  66 /* I'm not really happy about this, but we need to handle gimple and
  67    non-gimple trees.  */
69 /* Inlining, Cloning, Versioning, Parallelization
71 Inlining: a function body is duplicated, but the PARM_DECLs are
72 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
73 MODIFY_EXPRs that store to a dedicated returned-value variable.
74 The duplicated eh_region info of the copy will later be appended
75 to the info for the caller; the eh_region info in copied throwing
76 statements and RESX statements are adjusted accordingly.
78 Cloning: (only in C++) We have one body for a con/de/structor, and
79 multiple function decls, each with a unique parameter list.
80 Duplicate the body, using the given splay tree; some parameters
81 will become constants (like 0 or 1).
83 Versioning: a function body is duplicated and the result is a new
84 function rather than into blocks of an existing function as with
85 inlining. Some parameters will become constants.
87 Parallelization: a region of a function is duplicated resulting in
88 a new function. Variables may be replaced with complex expressions
89 to enable shared variable semantics.
  91    All of these will simultaneously look up any callgraph edges.  If
92 we're going to inline the duplicated function body, and the given
93 function has some cloned callgraph nodes (one for each place this
94 function will be inlined) those callgraph edges will be duplicated.
95 If we're cloning the body, those callgraph edges will be
96 updated to point into the new body. (Note that the original
97 callgraph node and edge list will not be altered.)
99 See the CALL_EXPR handling case in copy_tree_body_r (). */
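/* Illustrative sketch, not part of the original source: at the source
   level, the inlining transformation described above roughly turns

     int callee (int p) { return p + 1; }
     ...
     x = callee (a);

   into

     { int p_copy = a;             (PARM_DECL remapped to a VAR_DECL)
       retval_tmp = p_copy + 1; }  (the RETURN_EXPR became a MODIFY_EXPR)
     x = retval_tmp;

   where p_copy and retval_tmp stand for compiler-generated temporaries;
   the names are made up for illustration only.  */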
101 /* To Do:
103 o In order to make inlining-on-trees work, we pessimized
104 function-local static constants. In particular, they are now
105 always output, even when not addressed. Fix this by treating
106 function-local static constants just like global static
107 constants; the back-end already knows not to output them if they
108 are not needed.
110 o Provide heuristics to clamp inlining of recursive template
111 calls? */
114 /* Weights that estimate_num_insns uses to estimate the size of the
115 produced code. */
117 eni_weights eni_size_weights;
119 /* Weights that estimate_num_insns uses to estimate the time necessary
120 to execute the produced code. */
122 eni_weights eni_time_weights;
124 /* Prototypes. */
126 static tree declare_return_variable (copy_body_data *, tree, tree,
127 basic_block);
128 static void remap_block (tree *, copy_body_data *);
129 static void copy_bind_expr (tree *, int *, copy_body_data *);
130 static void declare_inline_vars (tree, tree);
131 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
132 static void prepend_lexical_block (tree current_block, tree new_block);
133 static tree copy_decl_to_var (tree, copy_body_data *);
134 static tree copy_result_decl_to_var (tree, copy_body_data *);
135 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
136 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
137 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
 139 /* Insert a tree->tree mapping for ID.  Although the name suggests
 140    that the trees should be variables, it is used for more than that.  */
142 void
143 insert_decl_map (copy_body_data *id, tree key, tree value)
145 id->decl_map->put (key, value);
147 /* Always insert an identity map as well. If we see this same new
148 node again, we won't want to duplicate it a second time. */
149 if (key != value)
150 id->decl_map->put (value, value);
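/* Illustrative note on the identity entry above (an editorial sketch, not
   original code): after

     insert_decl_map (id, old_var, new_var);

   the map contains both old_var -> new_var and new_var -> new_var, so
   remapping either tree later yields the same new_var instead of
   producing a second copy of the copy.  */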
153 /* Insert a tree->tree mapping for ID. This is only used for
154 variables. */
156 static void
157 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
159 if (!gimple_in_ssa_p (id->src_cfun))
160 return;
162 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
163 return;
165 if (!target_for_debug_bind (key))
166 return;
168 gcc_assert (TREE_CODE (key) == PARM_DECL);
169 gcc_assert (VAR_P (value));
171 if (!id->debug_map)
172 id->debug_map = new hash_map<tree, tree>;
174 id->debug_map->put (key, value);
177 /* If nonzero, we're remapping the contents of inlined debug
178 statements. If negative, an error has occurred, such as a
179 reference to a variable that isn't available in the inlined
180 context. */
181 static int processing_debug_stmt = 0;
183 /* Construct new SSA name for old NAME. ID is the inline context. */
185 static tree
186 remap_ssa_name (tree name, copy_body_data *id)
188 tree new_tree, var;
189 tree *n;
191 gcc_assert (TREE_CODE (name) == SSA_NAME);
193 n = id->decl_map->get (name);
194 if (n)
195 return unshare_expr (*n);
197 if (processing_debug_stmt)
199 if (SSA_NAME_IS_DEFAULT_DEF (name)
200 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
201 && id->entry_bb == NULL
202 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
204 tree vexpr = make_node (DEBUG_EXPR_DECL);
205 gimple *def_temp;
206 gimple_stmt_iterator gsi;
207 tree val = SSA_NAME_VAR (name);
209 n = id->decl_map->get (val);
210 if (n != NULL)
211 val = *n;
212 if (TREE_CODE (val) != PARM_DECL
213 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
215 processing_debug_stmt = -1;
216 return name;
218 n = id->decl_map->get (val);
219 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
220 return *n;
221 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
222 DECL_ARTIFICIAL (vexpr) = 1;
223 TREE_TYPE (vexpr) = TREE_TYPE (name);
224 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
225 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
226 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
227 insert_decl_map (id, val, vexpr);
228 return vexpr;
231 processing_debug_stmt = -1;
232 return name;
235 /* Remap anonymous SSA names or SSA names of anonymous decls. */
236 var = SSA_NAME_VAR (name);
237 if (!var
238 || (!SSA_NAME_IS_DEFAULT_DEF (name)
239 && VAR_P (var)
240 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
241 && DECL_ARTIFICIAL (var)
242 && DECL_IGNORED_P (var)
243 && !DECL_NAME (var)))
245 struct ptr_info_def *pi;
246 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
247 if (!var && SSA_NAME_IDENTIFIER (name))
248 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
249 insert_decl_map (id, name, new_tree);
250 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
251 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
252 /* At least IPA points-to info can be directly transferred. */
253 if (id->src_cfun->gimple_df
254 && id->src_cfun->gimple_df->ipa_pta
255 && POINTER_TYPE_P (TREE_TYPE (name))
256 && (pi = SSA_NAME_PTR_INFO (name))
257 && !pi->pt.anything)
259 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
260 new_pi->pt = pi->pt;
262 return new_tree;
265 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
266 in copy_bb. */
267 new_tree = remap_decl (var, id);
 269   /* We might've substituted a constant or another SSA_NAME for
 270      the variable.
 272      Replace the SSA name representing RESULT_DECL by the variable during
 273      inlining: this saves us from the need to introduce a PHI node in case
 274      the return value is only partly initialized.  */
275 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
276 && (!SSA_NAME_VAR (name)
277 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
278 || !id->transform_return_to_modify))
280 struct ptr_info_def *pi;
281 new_tree = make_ssa_name (new_tree);
282 insert_decl_map (id, name, new_tree);
283 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
284 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
285 /* At least IPA points-to info can be directly transferred. */
286 if (id->src_cfun->gimple_df
287 && id->src_cfun->gimple_df->ipa_pta
288 && POINTER_TYPE_P (TREE_TYPE (name))
289 && (pi = SSA_NAME_PTR_INFO (name))
290 && !pi->pt.anything)
292 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
293 new_pi->pt = pi->pt;
295 if (SSA_NAME_IS_DEFAULT_DEF (name))
 297	  /* By inlining a function having an uninitialized variable, we might
 298	     extend its lifetime (the variable might get reused).  This causes an
 299	     ICE in the case we end up extending the lifetime of an SSA name across
 300	     an abnormal edge, and it also increases register pressure.
 302	     We simply initialize all uninitialized vars to 0, except for the
 303	     case where we are inlining into the very first BB.  We could avoid
 304	     this for all BBs that are not inside strongly connected
 305	     regions of the CFG, but this is expensive to test.  */
306 if (id->entry_bb
307 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
308 && (!SSA_NAME_VAR (name)
309 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
310 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
311 0)->dest
312 || EDGE_COUNT (id->entry_bb->preds) != 1))
314 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
315 gimple *init_stmt;
316 tree zero = build_zero_cst (TREE_TYPE (new_tree));
318 init_stmt = gimple_build_assign (new_tree, zero);
319 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
320 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
322 else
324 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
325 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
329 else
330 insert_decl_map (id, name, new_tree);
331 return new_tree;
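/* Illustrative sketch of the zero-initialization above (not original
   code): consider inlining a callee such as

     int callee (void)
     {
       int u;
       if (cond ())
         u = 1;
       return u;
     }

   where cond () is a hypothetical predicate.  The copied default
   definition of 'u' has no defining statement in the caller; when the
   inlined body does not start in the caller's first basic block, the code
   above emits 'u_copy = 0' in id->entry_bb so the SSA name gets a
   definition and its lifetime is not extended across abnormal edges.  */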
334 /* Remap DECL during the copying of the BLOCK tree for the function. */
336 tree
337 remap_decl (tree decl, copy_body_data *id)
339 tree *n;
341 /* We only remap local variables in the current function. */
343 /* See if we have remapped this declaration. */
345 n = id->decl_map->get (decl);
347 if (!n && processing_debug_stmt)
349 processing_debug_stmt = -1;
350 return decl;
353 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
354 necessary DECLs have already been remapped and we do not want to duplicate
355 a decl coming from outside of the sequence we are copying. */
356 if (!n
357 && id->prevent_decl_creation_for_types
358 && id->remapping_type_depth > 0
359 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
360 return decl;
362 /* If we didn't already have an equivalent for this declaration, create one
363 now. */
364 if (!n)
366 /* Make a copy of the variable or label. */
367 tree t = id->copy_decl (decl, id);
369 /* Remember it, so that if we encounter this local entity again
370 we can reuse this copy. Do this early because remap_type may
371 need this decl for TYPE_STUB_DECL. */
372 insert_decl_map (id, decl, t);
374 if (!DECL_P (t))
375 return t;
377 /* Remap types, if necessary. */
378 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
379 if (TREE_CODE (t) == TYPE_DECL)
381 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
383 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
384 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
385 is not set on the TYPE_DECL, for example in LTO mode. */
386 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
388 tree x = build_variant_type_copy (TREE_TYPE (t));
389 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
390 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
391 DECL_ORIGINAL_TYPE (t) = x;
395 /* Remap sizes as necessary. */
396 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
397 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
399 /* If fields, do likewise for offset and qualifier. */
400 if (TREE_CODE (t) == FIELD_DECL)
402 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
403 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
404 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
407 return t;
410 if (id->do_not_unshare)
411 return *n;
412 else
413 return unshare_expr (*n);
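/* Illustrative usage note (not original code): a typical call is

     tree copy = remap_decl (old_local, id);

   The first call for OLD_LOCAL creates a copy via id->copy_decl and
   records it; subsequent calls return the recorded copy, unshared unless
   id->do_not_unshare is set.  */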
416 static tree
417 remap_type_1 (tree type, copy_body_data *id)
419 tree new_tree, t;
 421   /* We do need a copy.  Build and register it now.  If this is a pointer or
422 reference type, remap the designated type and make a new pointer or
423 reference type. */
424 if (TREE_CODE (type) == POINTER_TYPE)
426 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
427 TYPE_MODE (type),
428 TYPE_REF_CAN_ALIAS_ALL (type));
429 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
430 new_tree = build_type_attribute_qual_variant (new_tree,
431 TYPE_ATTRIBUTES (type),
432 TYPE_QUALS (type));
433 insert_decl_map (id, type, new_tree);
434 return new_tree;
436 else if (TREE_CODE (type) == REFERENCE_TYPE)
438 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
439 TYPE_MODE (type),
440 TYPE_REF_CAN_ALIAS_ALL (type));
441 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
442 new_tree = build_type_attribute_qual_variant (new_tree,
443 TYPE_ATTRIBUTES (type),
444 TYPE_QUALS (type));
445 insert_decl_map (id, type, new_tree);
446 return new_tree;
448 else
449 new_tree = copy_node (type);
451 insert_decl_map (id, type, new_tree);
453 /* This is a new type, not a copy of an old type. Need to reassociate
454 variants. We can handle everything except the main variant lazily. */
455 t = TYPE_MAIN_VARIANT (type);
456 if (type != t)
458 t = remap_type (t, id);
459 TYPE_MAIN_VARIANT (new_tree) = t;
460 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
461 TYPE_NEXT_VARIANT (t) = new_tree;
463 else
465 TYPE_MAIN_VARIANT (new_tree) = new_tree;
466 TYPE_NEXT_VARIANT (new_tree) = NULL;
469 if (TYPE_STUB_DECL (type))
470 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
472 /* Lazily create pointer and reference types. */
473 TYPE_POINTER_TO (new_tree) = NULL;
474 TYPE_REFERENCE_TO (new_tree) = NULL;
476 /* Copy all types that may contain references to local variables; be sure to
477 preserve sharing in between type and its main variant when possible. */
478 switch (TREE_CODE (new_tree))
480 case INTEGER_TYPE:
481 case REAL_TYPE:
482 case FIXED_POINT_TYPE:
483 case ENUMERAL_TYPE:
484 case BOOLEAN_TYPE:
485 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
487 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
488 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
490 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
491 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
493 else
495 t = TYPE_MIN_VALUE (new_tree);
496 if (t && TREE_CODE (t) != INTEGER_CST)
497 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
499 t = TYPE_MAX_VALUE (new_tree);
500 if (t && TREE_CODE (t) != INTEGER_CST)
501 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
503 return new_tree;
505 case FUNCTION_TYPE:
506 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
507 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
508 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
509 else
510 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
511 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
512 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
513 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
514 else
515 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
516 return new_tree;
518 case ARRAY_TYPE:
519 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
520 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
521 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
522 else
523 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
525 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
527 gcc_checking_assert (TYPE_DOMAIN (type)
528 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
529 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
531 else
533 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
 534	  /* For array bounds where we have decided not to copy over the bounds
 535	     variable, which isn't used in the OpenMP/OpenACC region, change them to
 536	     an uninitialized VAR_DECL temporary.  */
537 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
538 && id->adjust_array_error_bounds
539 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
541 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
542 DECL_ATTRIBUTES (v)
543 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
544 DECL_ATTRIBUTES (v));
545 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
548 break;
550 case RECORD_TYPE:
551 case UNION_TYPE:
552 case QUAL_UNION_TYPE:
553 if (TYPE_MAIN_VARIANT (type) != type
554 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
555 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
556 else
558 tree f, nf = NULL;
560 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
562 t = remap_decl (f, id);
563 DECL_CONTEXT (t) = new_tree;
564 DECL_CHAIN (t) = nf;
565 nf = t;
567 TYPE_FIELDS (new_tree) = nreverse (nf);
569 break;
571 case OFFSET_TYPE:
572 default:
573 /* Shouldn't have been thought variable sized. */
574 gcc_unreachable ();
 577   /* All variants of the type share the same size, so use the already remapped data.  */
578 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
580 tree s = TYPE_SIZE (type);
581 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
582 tree su = TYPE_SIZE_UNIT (type);
583 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
584 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
585 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
586 || s == mvs);
587 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
588 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
589 || su == mvsu);
590 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
591 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
593 else
595 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
596 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
599 return new_tree;
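/* Illustrative note on the variant handling in remap_type_1 (not original
   code): for a non-main variant, the copy is spliced into the remapped
   main variant's chain, i.e.

     main -> old_next -> ...   becomes   main -> copy -> old_next -> ...

   so TYPE_MAIN_VARIANT (copy) is the remapped main variant and the
   variant list stays consistent.  */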
602 /* Helper function for remap_type_2, called through walk_tree. */
604 static tree
605 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
607 copy_body_data *id = (copy_body_data *) data;
609 if (TYPE_P (*tp))
610 *walk_subtrees = 0;
612 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
613 return *tp;
615 return NULL_TREE;
618 /* Return true if TYPE needs to be remapped because remap_decl on any
619 needed embedded decl returns something other than that decl. */
621 static bool
622 remap_type_2 (tree type, copy_body_data *id)
624 tree t;
626 #define RETURN_TRUE_IF_VAR(T) \
627 do \
629 tree _t = (T); \
630 if (_t) \
632 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
633 return true; \
634 if (!TYPE_SIZES_GIMPLIFIED (type) \
635 && walk_tree (&_t, remap_type_3, id, NULL)) \
636 return true; \
639 while (0)
641 switch (TREE_CODE (type))
643 case POINTER_TYPE:
644 case REFERENCE_TYPE:
645 case FUNCTION_TYPE:
646 case METHOD_TYPE:
647 return remap_type_2 (TREE_TYPE (type), id);
649 case INTEGER_TYPE:
650 case REAL_TYPE:
651 case FIXED_POINT_TYPE:
652 case ENUMERAL_TYPE:
653 case BOOLEAN_TYPE:
654 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
655 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
656 return false;
658 case ARRAY_TYPE:
659 if (remap_type_2 (TREE_TYPE (type), id)
660 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
661 return true;
662 break;
664 case RECORD_TYPE:
665 case UNION_TYPE:
666 case QUAL_UNION_TYPE:
667 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
668 if (TREE_CODE (t) == FIELD_DECL)
670 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
671 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
672 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
673 if (TREE_CODE (type) == QUAL_UNION_TYPE)
674 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
676 break;
678 default:
679 return false;
682 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
683 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
684 return false;
685 #undef RETURN_TRUE_IF_VAR
688 tree
689 remap_type (tree type, copy_body_data *id)
691 tree *node;
692 tree tmp;
694 if (type == NULL)
695 return type;
697 /* See if we have remapped this type. */
698 node = id->decl_map->get (type);
699 if (node)
700 return *node;
702 /* The type only needs remapping if it's variably modified. */
703 if (! variably_modified_type_p (type, id->src_fn)
704 /* Don't remap if copy_decl method doesn't always return a new
705 decl and for all embedded decls returns the passed in decl. */
706 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
708 insert_decl_map (id, type, type);
709 return type;
712 id->remapping_type_depth++;
713 tmp = remap_type_1 (type, id);
714 id->remapping_type_depth--;
716 return tmp;
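/* Illustrative example of a variably modified type (not original code):

     void callee (int n)
     {
       int buf[n];
       use (buf);
     }

   Here the type of 'buf' has an upper bound that refers to the PARM_DECL
   'n', so when callee is inlined the type must be remapped to refer to
   the caller-side copy of that bound; a plain type like 'int[10]' is not
   variably modified and is mapped to itself above.  ('use' is just a
   placeholder for illustration.)  */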
719 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
721 static bool
722 can_be_nonlocal (tree decl, copy_body_data *id)
724 /* We cannot duplicate function decls. */
725 if (TREE_CODE (decl) == FUNCTION_DECL)
726 return true;
728 /* Local static vars must be non-local or we get multiple declaration
729 problems. */
730 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
731 return true;
733 return false;
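/* Illustrative example for can_be_nonlocal (not original code):

     int counter (void)
     {
       static int n;
       return ++n;
     }

   There is exactly one 'n' no matter how many inlined copies of the body
   exist, so such a decl must not be duplicated and is kept non-local by
   the check above; the same holds for FUNCTION_DECLs.  */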
736 static tree
737 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
738 copy_body_data *id)
740 tree old_var;
741 tree new_decls = NULL_TREE;
743 /* Remap its variables. */
744 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
746 tree new_var;
748 if (can_be_nonlocal (old_var, id))
750 /* We need to add this variable to the local decls as otherwise
751 nothing else will do so. */
752 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
753 add_local_decl (cfun, old_var);
754 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
755 && !DECL_IGNORED_P (old_var)
756 && nonlocalized_list)
757 vec_safe_push (*nonlocalized_list, old_var);
758 continue;
761 /* Remap the variable. */
762 new_var = remap_decl (old_var, id);
764 /* If we didn't remap this variable, we can't mess with its
765 TREE_CHAIN. If we remapped this variable to the return slot, it's
766 already declared somewhere else, so don't declare it here. */
768 if (new_var == id->retvar)
770 else if (!new_var)
772 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
773 && !DECL_IGNORED_P (old_var)
774 && nonlocalized_list)
775 vec_safe_push (*nonlocalized_list, old_var);
777 else
779 gcc_assert (DECL_P (new_var));
780 DECL_CHAIN (new_var) = new_decls;
781 new_decls = new_var;
783 /* Also copy value-expressions. */
784 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
786 tree tem = DECL_VALUE_EXPR (new_var);
787 bool old_regimplify = id->regimplify;
788 id->remapping_type_depth++;
789 walk_tree (&tem, copy_tree_body_r, id, NULL);
790 id->remapping_type_depth--;
791 id->regimplify = old_regimplify;
792 SET_DECL_VALUE_EXPR (new_var, tem);
797 return nreverse (new_decls);
800 /* Copy the BLOCK to contain remapped versions of the variables
801 therein. And hook the new block into the block-tree. */
803 static void
804 remap_block (tree *block, copy_body_data *id)
806 tree old_block;
807 tree new_block;
809 /* Make the new block. */
810 old_block = *block;
811 new_block = make_node (BLOCK);
812 TREE_USED (new_block) = TREE_USED (old_block);
813 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
814 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
815 BLOCK_NONLOCALIZED_VARS (new_block)
816 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
817 *block = new_block;
819 /* Remap its variables. */
820 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
821 &BLOCK_NONLOCALIZED_VARS (new_block),
822 id);
824 if (id->transform_lang_insert_block)
825 id->transform_lang_insert_block (new_block);
827 /* Remember the remapped block. */
828 insert_decl_map (id, old_block, new_block);
831 /* Copy the whole block tree and root it in id->block. */
833 static tree
834 remap_blocks (tree block, copy_body_data *id)
836 tree t;
837 tree new_tree = block;
839 if (!block)
840 return NULL;
842 remap_block (&new_tree, id);
843 gcc_assert (new_tree != block);
844 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
845 prepend_lexical_block (new_tree, remap_blocks (t, id));
846 /* Blocks are in arbitrary order, but make things slightly prettier and do
847 not swap order when producing a copy. */
848 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
849 return new_tree;
852 /* Remap the block tree rooted at BLOCK to nothing. */
854 static void
855 remap_blocks_to_null (tree block, copy_body_data *id)
857 tree t;
858 insert_decl_map (id, block, NULL_TREE);
859 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
860 remap_blocks_to_null (t, id);
863 /* Remap the location info pointed to by LOCUS. */
865 static location_t
866 remap_location (location_t locus, copy_body_data *id)
868 if (LOCATION_BLOCK (locus))
870 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
871 gcc_assert (n);
872 if (*n)
873 return set_block (locus, *n);
876 locus = LOCATION_LOCUS (locus);
878 if (locus != UNKNOWN_LOCATION && id->block)
879 return set_block (locus, id->block);
881 return locus;
884 static void
885 copy_statement_list (tree *tp)
887 tree_stmt_iterator oi, ni;
888 tree new_tree;
890 new_tree = alloc_stmt_list ();
891 ni = tsi_start (new_tree);
892 oi = tsi_start (*tp);
893 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
894 *tp = new_tree;
896 for (; !tsi_end_p (oi); tsi_next (&oi))
898 tree stmt = tsi_stmt (oi);
899 if (TREE_CODE (stmt) == STATEMENT_LIST)
900 /* This copy is not redundant; tsi_link_after will smash this
901 STATEMENT_LIST into the end of the one we're building, and we
902 don't want to do that with the original. */
903 copy_statement_list (&stmt);
904 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
908 static void
909 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
911 tree block = BIND_EXPR_BLOCK (*tp);
912 /* Copy (and replace) the statement. */
913 copy_tree_r (tp, walk_subtrees, NULL);
914 if (block)
916 remap_block (&block, id);
917 BIND_EXPR_BLOCK (*tp) = block;
920 if (BIND_EXPR_VARS (*tp))
921 /* This will remap a lot of the same decls again, but this should be
922 harmless. */
923 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
927 /* Create a new gimple_seq by remapping all the statements in BODY
928 using the inlining information in ID. */
930 static gimple_seq
931 remap_gimple_seq (gimple_seq body, copy_body_data *id)
933 gimple_stmt_iterator si;
934 gimple_seq new_body = NULL;
936 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
938 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
939 gimple_seq_add_seq (&new_body, new_stmts);
942 return new_body;
946 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
947 block using the mapping information in ID. */
949 static gimple *
950 copy_gimple_bind (gbind *stmt, copy_body_data *id)
952 gimple *new_bind;
953 tree new_block, new_vars;
954 gimple_seq body, new_body;
956 /* Copy the statement. Note that we purposely don't use copy_stmt
957 here because we need to remap statements as we copy. */
958 body = gimple_bind_body (stmt);
959 new_body = remap_gimple_seq (body, id);
961 new_block = gimple_bind_block (stmt);
962 if (new_block)
963 remap_block (&new_block, id);
965 /* This will remap a lot of the same decls again, but this should be
966 harmless. */
967 new_vars = gimple_bind_vars (stmt);
968 if (new_vars)
969 new_vars = remap_decls (new_vars, NULL, id);
971 new_bind = gimple_build_bind (new_vars, new_body, new_block);
973 return new_bind;
976 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
978 static bool
979 is_parm (tree decl)
981 if (TREE_CODE (decl) == SSA_NAME)
983 decl = SSA_NAME_VAR (decl);
984 if (!decl)
985 return false;
988 return (TREE_CODE (decl) == PARM_DECL);
991 /* Remap the dependence CLIQUE from the source to the destination function
992 as specified in ID. */
994 static unsigned short
995 remap_dependence_clique (copy_body_data *id, unsigned short clique)
997 if (clique == 0 || processing_debug_stmt)
998 return 0;
999 if (!id->dependence_map)
1000 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1001 bool existed;
1002 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1003 if (!existed)
1005 /* Clique 1 is reserved for local ones set by PTA. */
1006 if (cfun->last_clique == 0)
1007 cfun->last_clique = 1;
1008 newc = ++cfun->last_clique;
1010 return newc;
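/* Illustrative note (not original code): MR_DEPENDENCE_CLIQUE numbers are
   only meaningful within a single function, so each clique coming from
   the source function is assigned a fresh number in the destination
   function here, while clique 1 remains reserved for cliques assigned
   locally by points-to analysis, as the comment above says.  */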
1013 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1014 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1015 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
1016 recursing into the children nodes of *TP. */
1018 static tree
1019 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1021 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1022 copy_body_data *id = (copy_body_data *) wi_p->info;
1023 tree fn = id->src_fn;
1025 /* For recursive invocations this is no longer the LHS itself. */
1026 bool is_lhs = wi_p->is_lhs;
1027 wi_p->is_lhs = false;
1029 if (TREE_CODE (*tp) == SSA_NAME)
1031 *tp = remap_ssa_name (*tp, id);
1032 *walk_subtrees = 0;
1033 if (is_lhs)
1034 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1035 return NULL;
1037 else if (auto_var_in_fn_p (*tp, fn))
1039 /* Local variables and labels need to be replaced by equivalent
1040 variables. We don't want to copy static variables; there's
1041 only one of those, no matter how many times we inline the
1042 containing function. Similarly for globals from an outer
1043 function. */
1044 tree new_decl;
1046 /* Remap the declaration. */
1047 new_decl = remap_decl (*tp, id);
1048 gcc_assert (new_decl);
1049 /* Replace this variable with the copy. */
1050 STRIP_TYPE_NOPS (new_decl);
1051 /* ??? The C++ frontend uses void * pointer zero to initialize
1052 any other type. This confuses the middle-end type verification.
1053 As cloned bodies do not go through gimplification again the fixup
1054 there doesn't trigger. */
1055 if (TREE_CODE (new_decl) == INTEGER_CST
1056 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1057 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1058 *tp = new_decl;
1059 *walk_subtrees = 0;
1061 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1062 gcc_unreachable ();
1063 else if (TREE_CODE (*tp) == SAVE_EXPR)
1064 gcc_unreachable ();
1065 else if (TREE_CODE (*tp) == LABEL_DECL
1066 && (!DECL_CONTEXT (*tp)
1067 || decl_function_context (*tp) == id->src_fn))
1068 /* These may need to be remapped for EH handling. */
1069 *tp = remap_decl (*tp, id);
1070 else if (TREE_CODE (*tp) == FIELD_DECL)
1072 /* If the enclosing record type is variably_modified_type_p, the field
1073 has already been remapped. Otherwise, it need not be. */
1074 tree *n = id->decl_map->get (*tp);
1075 if (n)
1076 *tp = *n;
1077 *walk_subtrees = 0;
1079 else if (TYPE_P (*tp))
1080 /* Types may need remapping as well. */
1081 *tp = remap_type (*tp, id);
1082 else if (CONSTANT_CLASS_P (*tp))
1084 /* If this is a constant, we have to copy the node iff the type
1085 will be remapped. copy_tree_r will not copy a constant. */
1086 tree new_type = remap_type (TREE_TYPE (*tp), id);
1088 if (new_type == TREE_TYPE (*tp))
1089 *walk_subtrees = 0;
1091 else if (TREE_CODE (*tp) == INTEGER_CST)
1092 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1093 else
1095 *tp = copy_node (*tp);
1096 TREE_TYPE (*tp) = new_type;
1099 else
1101 /* Otherwise, just copy the node. Note that copy_tree_r already
1102 knows not to copy VAR_DECLs, etc., so this is safe. */
1104 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1106 /* We need to re-canonicalize MEM_REFs from inline substitutions
1107 that can happen when a pointer argument is an ADDR_EXPR.
1108 Recurse here manually to allow that. */
1109 tree ptr = TREE_OPERAND (*tp, 0);
1110 tree type = remap_type (TREE_TYPE (*tp), id);
1111 tree old = *tp;
1112 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1113 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1114 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1115 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1116 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1117 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1119 MR_DEPENDENCE_CLIQUE (*tp)
1120 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1121 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1123 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1124 remapped a parameter as the property might be valid only
1125 for the parameter itself. */
1126 if (TREE_THIS_NOTRAP (old)
1127 && (!is_parm (TREE_OPERAND (old, 0))
1128 || (!id->transform_parameter && is_parm (ptr))))
1129 TREE_THIS_NOTRAP (*tp) = 1;
1130 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1131 *walk_subtrees = 0;
1132 return NULL;
1135 /* Here is the "usual case". Copy this tree node, and then
1136 tweak some special cases. */
1137 copy_tree_r (tp, walk_subtrees, NULL);
1139 if (TREE_CODE (*tp) != OMP_CLAUSE)
1140 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1142 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1144 /* The copied TARGET_EXPR has never been expanded, even if the
1145 original node was expanded already. */
1146 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1147 TREE_OPERAND (*tp, 3) = NULL_TREE;
1149 else if (TREE_CODE (*tp) == ADDR_EXPR)
1151 /* Variable substitution need not be simple. In particular,
1152 the MEM_REF substitution above. Make sure that
1153 TREE_CONSTANT and friends are up-to-date. */
1154 int invariant = is_gimple_min_invariant (*tp);
1155 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1156 recompute_tree_invariant_for_addr_expr (*tp);
1158 /* If this used to be invariant, but is not any longer,
1159 then regimplification is probably needed. */
1160 if (invariant && !is_gimple_min_invariant (*tp))
1161 id->regimplify = true;
1163 *walk_subtrees = 0;
1167 /* Update the TREE_BLOCK for the cloned expr. */
1168 if (EXPR_P (*tp))
1170 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1171 tree old_block = TREE_BLOCK (*tp);
1172 if (old_block)
1174 tree *n;
1175 n = id->decl_map->get (TREE_BLOCK (*tp));
1176 if (n)
1177 new_block = *n;
1179 TREE_SET_BLOCK (*tp, new_block);
1182 /* Keep iterating. */
1183 return NULL_TREE;
1187 /* Called from copy_body_id via walk_tree. DATA is really a
1188 `copy_body_data *'. */
1190 tree
1191 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1193 copy_body_data *id = (copy_body_data *) data;
1194 tree fn = id->src_fn;
1195 tree new_block;
1197 /* Begin by recognizing trees that we'll completely rewrite for the
1198 inlining context. Our output for these trees is completely
1199      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1200 into an edge). Further down, we'll handle trees that get
1201 duplicated and/or tweaked. */
1203 /* When requested, RETURN_EXPRs should be transformed to just the
1204 contained MODIFY_EXPR. The branch semantics of the return will
1205 be handled elsewhere by manipulating the CFG rather than a statement. */
1206 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1208 tree assignment = TREE_OPERAND (*tp, 0);
1210 /* If we're returning something, just turn that into an
1211 assignment into the equivalent of the original RESULT_DECL.
1212 If the "assignment" is just the result decl, the result
1213 decl has already been set (e.g. a recent "foo (&result_decl,
1214 ...)"); just toss the entire RETURN_EXPR. */
1215 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1217 /* Replace the RETURN_EXPR with (a copy of) the
1218 MODIFY_EXPR hanging underneath. */
1219 *tp = copy_node (assignment);
1221 else /* Else the RETURN_EXPR returns no value. */
1223 *tp = NULL;
1224 return (tree) (void *)1;
1227 else if (TREE_CODE (*tp) == SSA_NAME)
1229 *tp = remap_ssa_name (*tp, id);
1230 *walk_subtrees = 0;
1231 return NULL;
1234 /* Local variables and labels need to be replaced by equivalent
1235 variables. We don't want to copy static variables; there's only
1236 one of those, no matter how many times we inline the containing
1237 function. Similarly for globals from an outer function. */
1238 else if (auto_var_in_fn_p (*tp, fn))
1240 tree new_decl;
1242 /* Remap the declaration. */
1243 new_decl = remap_decl (*tp, id);
1244 gcc_assert (new_decl);
1245 /* Replace this variable with the copy. */
1246 STRIP_TYPE_NOPS (new_decl);
1247 *tp = new_decl;
1248 *walk_subtrees = 0;
1250 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1251 copy_statement_list (tp);
1252 else if (TREE_CODE (*tp) == SAVE_EXPR
1253 || TREE_CODE (*tp) == TARGET_EXPR)
1254 remap_save_expr (tp, id->decl_map, walk_subtrees);
1255 else if (TREE_CODE (*tp) == LABEL_DECL
1256 && (! DECL_CONTEXT (*tp)
1257 || decl_function_context (*tp) == id->src_fn))
1258 /* These may need to be remapped for EH handling. */
1259 *tp = remap_decl (*tp, id);
1260 else if (TREE_CODE (*tp) == BIND_EXPR)
1261 copy_bind_expr (tp, walk_subtrees, id);
1262 /* Types may need remapping as well. */
1263 else if (TYPE_P (*tp))
1264 *tp = remap_type (*tp, id);
1266 /* If this is a constant, we have to copy the node iff the type will be
1267 remapped. copy_tree_r will not copy a constant. */
1268 else if (CONSTANT_CLASS_P (*tp))
1270 tree new_type = remap_type (TREE_TYPE (*tp), id);
1272 if (new_type == TREE_TYPE (*tp))
1273 *walk_subtrees = 0;
1275 else if (TREE_CODE (*tp) == INTEGER_CST)
1276 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1277 else
1279 *tp = copy_node (*tp);
1280 TREE_TYPE (*tp) = new_type;
1284 /* Otherwise, just copy the node. Note that copy_tree_r already
1285 knows not to copy VAR_DECLs, etc., so this is safe. */
1286 else
1288 /* Here we handle trees that are not completely rewritten.
1289 First we detect some inlining-induced bogosities for
1290 discarding. */
1291 if (TREE_CODE (*tp) == MODIFY_EXPR
1292 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1293 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1295 /* Some assignments VAR = VAR; don't generate any rtl code
1296 and thus don't count as variable modification. Avoid
1297 keeping bogosities like 0 = 0. */
1298 tree decl = TREE_OPERAND (*tp, 0), value;
1299 tree *n;
1301 n = id->decl_map->get (decl);
1302 if (n)
1304 value = *n;
1305 STRIP_TYPE_NOPS (value);
1306 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1308 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1309 return copy_tree_body_r (tp, walk_subtrees, data);
1313 else if (TREE_CODE (*tp) == INDIRECT_REF)
1315 /* Get rid of *& from inline substitutions that can happen when a
1316 pointer argument is an ADDR_EXPR. */
1317 tree decl = TREE_OPERAND (*tp, 0);
1318 tree *n = id->decl_map->get (decl);
1319 if (n)
1321 /* If we happen to get an ADDR_EXPR in n->value, strip
1322 it manually here as we'll eventually get ADDR_EXPRs
1323 which lie about their types pointed to. In this case
1324 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1325 but we absolutely rely on that. As fold_indirect_ref
1326 does other useful transformations, try that first, though. */
1327 tree type = TREE_TYPE (*tp);
1328 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1329 tree old = *tp;
1330 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1331 if (! *tp)
1333 type = remap_type (type, id);
1334 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1337 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1338 /* ??? We should either assert here or build
1339 a VIEW_CONVERT_EXPR instead of blindly leaking
1340 incompatible types to our IL. */
1341 if (! *tp)
1342 *tp = TREE_OPERAND (ptr, 0);
1344 else
1346 *tp = build1 (INDIRECT_REF, type, ptr);
1347 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1348 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1349 TREE_READONLY (*tp) = TREE_READONLY (old);
1350 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1351 have remapped a parameter as the property might be
1352 valid only for the parameter itself. */
1353 if (TREE_THIS_NOTRAP (old)
1354 && (!is_parm (TREE_OPERAND (old, 0))
1355 || (!id->transform_parameter && is_parm (ptr))))
1356 TREE_THIS_NOTRAP (*tp) = 1;
1359 *walk_subtrees = 0;
1360 return NULL;
1363 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1365 /* We need to re-canonicalize MEM_REFs from inline substitutions
1366 that can happen when a pointer argument is an ADDR_EXPR.
1367 Recurse here manually to allow that. */
1368 tree ptr = TREE_OPERAND (*tp, 0);
1369 tree type = remap_type (TREE_TYPE (*tp), id);
1370 tree old = *tp;
1371 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1372 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1373 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1374 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1375 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1376 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1378 MR_DEPENDENCE_CLIQUE (*tp)
1379 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1380 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1382 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1383 remapped a parameter as the property might be valid only
1384 for the parameter itself. */
1385 if (TREE_THIS_NOTRAP (old)
1386 && (!is_parm (TREE_OPERAND (old, 0))
1387 || (!id->transform_parameter && is_parm (ptr))))
1388 TREE_THIS_NOTRAP (*tp) = 1;
1389 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1390 *walk_subtrees = 0;
1391 return NULL;
1394 /* Here is the "usual case". Copy this tree node, and then
1395 tweak some special cases. */
1396 copy_tree_r (tp, walk_subtrees, NULL);
1398       /* If EXPR has a block defined, map it to the newly constructed block.
1399          When inlining we want EXPRs without a block to appear in the block
1400          of the function call if we are not remapping a type.  */
1401 if (EXPR_P (*tp))
1403 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1404 if (TREE_BLOCK (*tp))
1406 tree *n;
1407 n = id->decl_map->get (TREE_BLOCK (*tp));
1408 if (n)
1409 new_block = *n;
1411 TREE_SET_BLOCK (*tp, new_block);
1414 if (TREE_CODE (*tp) != OMP_CLAUSE)
1415 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1417 /* The copied TARGET_EXPR has never been expanded, even if the
1418 original node was expanded already. */
1419 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1421 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1422 TREE_OPERAND (*tp, 3) = NULL_TREE;
1425 /* Variable substitution need not be simple. In particular, the
1426 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1427 and friends are up-to-date. */
1428 else if (TREE_CODE (*tp) == ADDR_EXPR)
1430 int invariant = is_gimple_min_invariant (*tp);
1431 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1433 /* Handle the case where we substituted an INDIRECT_REF
1434 into the operand of the ADDR_EXPR. */
1435 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1436 && !id->do_not_fold)
1438 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1439 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1440 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1441 *tp = t;
1443 else
1444 recompute_tree_invariant_for_addr_expr (*tp);
1446 /* If this used to be invariant, but is not any longer,
1447 then regimplification is probably needed. */
1448 if (invariant && !is_gimple_min_invariant (*tp))
1449 id->regimplify = true;
1451 *walk_subtrees = 0;
1455 /* Keep iterating. */
1456 return NULL_TREE;
1459 /* Helper for remap_gimple_stmt. Given an EH region number for the
1460 source function, map that to the duplicate EH region number in
1461 the destination function. */
1463 static int
1464 remap_eh_region_nr (int old_nr, copy_body_data *id)
1466 eh_region old_r, new_r;
1468 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1469 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1471 return new_r->index;
1474 /* Similar, but operate on INTEGER_CSTs. */
1476 static tree
1477 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1479 int old_nr, new_nr;
1481 old_nr = tree_to_shwi (old_t_nr);
1482 new_nr = remap_eh_region_nr (old_nr, id);
1484 return build_int_cst (integer_type_node, new_nr);
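/* Illustrative note (not original code): the two helpers above are used
   below to rewrite region-number operands of EH-related statements and of
   calls such as __builtin_eh_pointer (N), so that the constant N refers
   to the corresponding region in the destination function's EH tree
   rather than the source function's.  */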
1487 /* Helper for copy_bb. Remap statement STMT using the inlining
1488 information in ID. Return the new statement copy. */
1490 static gimple_seq
1491 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1493 gimple *copy = NULL;
1494 struct walk_stmt_info wi;
1495 bool skip_first = false;
1496 gimple_seq stmts = NULL;
1498 if (is_gimple_debug (stmt)
1499 && (gimple_debug_nonbind_marker_p (stmt)
1500 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1501 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1502 return NULL;
1504 /* Begin by recognizing trees that we'll completely rewrite for the
1505 inlining context. Our output for these trees is completely
1506 different from our input (e.g. RETURN_EXPR is deleted and morphs
1507 into an edge). Further down, we'll handle trees that get
1508 duplicated and/or tweaked. */
1510 /* When requested, GIMPLE_RETURN should be transformed to just the
1511 contained GIMPLE_ASSIGN. The branch semantics of the return will
1512 be handled elsewhere by manipulating the CFG rather than the
1513 statement. */
1514 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1516 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1518 /* If we're returning something, just turn that into an
1519 assignment to the equivalent of the original RESULT_DECL.
1520 If RETVAL is just the result decl, the result decl has
1521 already been set (e.g. a recent "foo (&result_decl, ...)");
1522 just toss the entire GIMPLE_RETURN. */
1523 if (retval
1524 && (TREE_CODE (retval) != RESULT_DECL
1525 && (TREE_CODE (retval) != SSA_NAME
1526 || ! SSA_NAME_VAR (retval)
1527 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1529 copy = gimple_build_assign (id->do_not_unshare
1530 ? id->retvar : unshare_expr (id->retvar),
1531 retval);
1532 /* id->retvar is already substituted. Skip it on later remapping. */
1533 skip_first = true;
1535 else
1536 return NULL;
1538 else if (gimple_has_substatements (stmt))
1540 gimple_seq s1, s2;
1542 /* When cloning bodies from the C++ front end, we will be handed bodies
1543 in High GIMPLE form. Handle here all the High GIMPLE statements that
1544 have embedded statements. */
1545 switch (gimple_code (stmt))
1547 case GIMPLE_BIND:
1548 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1549 break;
1551 case GIMPLE_CATCH:
1553 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1554 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1555 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1557 break;
1559 case GIMPLE_EH_FILTER:
1560 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1561 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1562 break;
1564 case GIMPLE_TRY:
1565 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1566 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1567 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1568 break;
1570 case GIMPLE_WITH_CLEANUP_EXPR:
1571 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1572 copy = gimple_build_wce (s1);
1573 break;
1575 case GIMPLE_OMP_PARALLEL:
1577 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1578 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1579 copy = gimple_build_omp_parallel
1580 (s1,
1581 gimple_omp_parallel_clauses (omp_par_stmt),
1582 gimple_omp_parallel_child_fn (omp_par_stmt),
1583 gimple_omp_parallel_data_arg (omp_par_stmt));
1585 break;
1587 case GIMPLE_OMP_TASK:
1588 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1589 copy = gimple_build_omp_task
1590 (s1,
1591 gimple_omp_task_clauses (stmt),
1592 gimple_omp_task_child_fn (stmt),
1593 gimple_omp_task_data_arg (stmt),
1594 gimple_omp_task_copy_fn (stmt),
1595 gimple_omp_task_arg_size (stmt),
1596 gimple_omp_task_arg_align (stmt));
1597 break;
1599 case GIMPLE_OMP_FOR:
1600 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1601 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1602 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1603 gimple_omp_for_clauses (stmt),
1604 gimple_omp_for_collapse (stmt), s2);
1606 size_t i;
1607 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1609 gimple_omp_for_set_index (copy, i,
1610 gimple_omp_for_index (stmt, i));
1611 gimple_omp_for_set_initial (copy, i,
1612 gimple_omp_for_initial (stmt, i));
1613 gimple_omp_for_set_final (copy, i,
1614 gimple_omp_for_final (stmt, i));
1615 gimple_omp_for_set_incr (copy, i,
1616 gimple_omp_for_incr (stmt, i));
1617 gimple_omp_for_set_cond (copy, i,
1618 gimple_omp_for_cond (stmt, i));
1621 break;
1623 case GIMPLE_OMP_MASTER:
1624 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1625 copy = gimple_build_omp_master (s1);
1626 break;
1628 case GIMPLE_OMP_TASKGROUP:
1629 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1630 copy = gimple_build_omp_taskgroup
1631 (s1, gimple_omp_taskgroup_clauses (stmt));
1632 break;
1634 case GIMPLE_OMP_ORDERED:
1635 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1636 copy = gimple_build_omp_ordered
1637 (s1,
1638 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1639 break;
1641 case GIMPLE_OMP_SECTION:
1642 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1643 copy = gimple_build_omp_section (s1);
1644 break;
1646 case GIMPLE_OMP_SECTIONS:
1647 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1648 copy = gimple_build_omp_sections
1649 (s1, gimple_omp_sections_clauses (stmt));
1650 break;
1652 case GIMPLE_OMP_SINGLE:
1653 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1654 copy = gimple_build_omp_single
1655 (s1, gimple_omp_single_clauses (stmt));
1656 break;
1658 case GIMPLE_OMP_TARGET:
1659 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1660 copy = gimple_build_omp_target
1661 (s1, gimple_omp_target_kind (stmt),
1662 gimple_omp_target_clauses (stmt));
1663 break;
1665 case GIMPLE_OMP_TEAMS:
1666 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1667 copy = gimple_build_omp_teams
1668 (s1, gimple_omp_teams_clauses (stmt));
1669 break;
1671 case GIMPLE_OMP_CRITICAL:
1672 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1673 copy = gimple_build_omp_critical (s1,
1674 gimple_omp_critical_name
1675 (as_a <gomp_critical *> (stmt)),
1676 gimple_omp_critical_clauses
1677 (as_a <gomp_critical *> (stmt)));
1678 break;
1680 case GIMPLE_TRANSACTION:
1682 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1683 gtransaction *new_trans_stmt;
1684 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1685 id);
1686 copy = new_trans_stmt = gimple_build_transaction (s1);
1687 gimple_transaction_set_subcode (new_trans_stmt,
1688 gimple_transaction_subcode (old_trans_stmt));
1689 gimple_transaction_set_label_norm (new_trans_stmt,
1690 gimple_transaction_label_norm (old_trans_stmt));
1691 gimple_transaction_set_label_uninst (new_trans_stmt,
1692 gimple_transaction_label_uninst (old_trans_stmt));
1693 gimple_transaction_set_label_over (new_trans_stmt,
1694 gimple_transaction_label_over (old_trans_stmt));
1696 break;
1698 default:
1699 gcc_unreachable ();
1702 else
1704 if (gimple_assign_copy_p (stmt)
1705 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1706 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1708 /* Here we handle statements that are not completely rewritten.
1709 First we detect some inlining-induced bogosities for
1710 discarding. */
1712 /* Some assignments VAR = VAR; don't generate any rtl code
1713 and thus don't count as variable modification. Avoid
1714 keeping bogosities like 0 = 0. */
1715 tree decl = gimple_assign_lhs (stmt), value;
1716 tree *n;
1718 n = id->decl_map->get (decl);
1719 if (n)
1721 value = *n;
1722 STRIP_TYPE_NOPS (value);
1723 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1724 return NULL;
1728 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1729 in a block that we aren't copying during tree_function_versioning,
1730 just drop the clobber stmt. */
1731 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1733 tree lhs = gimple_assign_lhs (stmt);
1734 if (TREE_CODE (lhs) == MEM_REF
1735 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1737 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1738 if (gimple_bb (def_stmt)
1739 && !bitmap_bit_p (id->blocks_to_copy,
1740 gimple_bb (def_stmt)->index))
1741 return NULL;
1745 if (gimple_debug_bind_p (stmt))
1747 gdebug *copy
1748 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1749 gimple_debug_bind_get_value (stmt),
1750 stmt);
1751 if (id->reset_location)
1752 gimple_set_location (copy, input_location);
1753 id->debug_stmts.safe_push (copy);
1754 gimple_seq_add_stmt (&stmts, copy);
1755 return stmts;
1757 if (gimple_debug_source_bind_p (stmt))
1759 gdebug *copy = gimple_build_debug_source_bind
1760 (gimple_debug_source_bind_get_var (stmt),
1761 gimple_debug_source_bind_get_value (stmt),
1762 stmt);
1763 if (id->reset_location)
1764 gimple_set_location (copy, input_location);
1765 id->debug_stmts.safe_push (copy);
1766 gimple_seq_add_stmt (&stmts, copy);
1767 return stmts;
1769 if (gimple_debug_nonbind_marker_p (stmt))
1771 /* If the inlined function has too many debug markers,
1772 don't copy them. */
1773 if (id->src_cfun->debug_marker_count
1774 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1775 return stmts;
1777 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1778 if (id->reset_location)
1779 gimple_set_location (copy, input_location);
1780 id->debug_stmts.safe_push (copy);
1781 gimple_seq_add_stmt (&stmts, copy);
1782 return stmts;
1785 /* Create a new deep copy of the statement. */
1786 copy = gimple_copy (stmt);
1788 /* Clear flags that need revisiting. */
1789 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1791 if (gimple_call_tail_p (call_stmt))
1792 gimple_call_set_tail (call_stmt, false);
1793 if (gimple_call_from_thunk_p (call_stmt))
1794 gimple_call_set_from_thunk (call_stmt, false);
1795 if (gimple_call_internal_p (call_stmt))
1796 switch (gimple_call_internal_fn (call_stmt))
1798 case IFN_GOMP_SIMD_LANE:
1799 case IFN_GOMP_SIMD_VF:
1800 case IFN_GOMP_SIMD_LAST_LANE:
1801 case IFN_GOMP_SIMD_ORDERED_START:
1802 case IFN_GOMP_SIMD_ORDERED_END:
1803 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1804 break;
1805 default:
1806 break;
1810 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1811 RESX and EH_DISPATCH. */
1812 if (id->eh_map)
1813 switch (gimple_code (copy))
1815 case GIMPLE_CALL:
1817 tree r, fndecl = gimple_call_fndecl (copy);
1818 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1819 switch (DECL_FUNCTION_CODE (fndecl))
1821 case BUILT_IN_EH_COPY_VALUES:
1822 r = gimple_call_arg (copy, 1);
1823 r = remap_eh_region_tree_nr (r, id);
1824 gimple_call_set_arg (copy, 1, r);
1825 /* FALLTHRU */
1827 case BUILT_IN_EH_POINTER:
1828 case BUILT_IN_EH_FILTER:
1829 r = gimple_call_arg (copy, 0);
1830 r = remap_eh_region_tree_nr (r, id);
1831 gimple_call_set_arg (copy, 0, r);
1832 break;
1834 default:
1835 break;
1838 /* Reset alias info if we didn't apply measures to
1839 keep it valid over inlining by setting DECL_PT_UID. */
1840 if (!id->src_cfun->gimple_df
1841 || !id->src_cfun->gimple_df->ipa_pta)
1842 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1844 break;
1846 case GIMPLE_RESX:
1848 gresx *resx_stmt = as_a <gresx *> (copy);
1849 int r = gimple_resx_region (resx_stmt);
1850 r = remap_eh_region_nr (r, id);
1851 gimple_resx_set_region (resx_stmt, r);
1853 break;
1855 case GIMPLE_EH_DISPATCH:
1857 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1858 int r = gimple_eh_dispatch_region (eh_dispatch);
1859 r = remap_eh_region_nr (r, id);
1860 gimple_eh_dispatch_set_region (eh_dispatch, r);
1862 break;
1864 default:
1865 break;
1869 /* If STMT has a block defined, map it to the newly constructed block. */
1870 if (tree block = gimple_block (copy))
1872 tree *n;
1873 n = id->decl_map->get (block);
1874 gcc_assert (n);
1875 gimple_set_block (copy, *n);
1878 if (id->reset_location)
1879 gimple_set_location (copy, input_location);
1881 /* Debug statements ought to be rebuilt and not copied. */
1882 gcc_checking_assert (!is_gimple_debug (copy));
1884 /* Remap all the operands in COPY. */
1885 memset (&wi, 0, sizeof (wi));
1886 wi.info = id;
1887 if (skip_first)
1888 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1889 else
1890 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1892 /* Clear the copied virtual operands. We are not remapping them here
1893 but are going to recreate them from scratch. */
1894 if (gimple_has_mem_ops (copy))
1896 gimple_set_vdef (copy, NULL_TREE);
1897 gimple_set_vuse (copy, NULL_TREE);
1900 gimple_seq_add_stmt (&stmts, copy);
1901 return stmts;
1905 /* Copy a basic block, scaling its profile accordingly. Edges will be taken
1906 care of later. */
1908 static basic_block
1909 copy_bb (copy_body_data *id, basic_block bb,
1910 profile_count num, profile_count den)
1912 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1913 basic_block copy_basic_block;
1914 tree decl;
1915 basic_block prev;
1917 profile_count::adjust_for_ipa_scaling (&num, &den);
1919 /* Search for previous copied basic block. */
1920 prev = bb->prev_bb;
1921 while (!prev->aux)
1922 prev = prev->prev_bb;
1924 /* create_basic_block() will append every new block to
1925 basic_block_info automatically. */
1926 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1927 copy_basic_block->count = bb->count.apply_scale (num, den);
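/* A small worked example of the scaling above (made-up numbers): if the
   callee's entry count DEN is 1000, this call site's count NUM is 250 and
   BB was executed 400 times in the callee's profile, the copied block gets
   count 400 * 250 / 1000 = 100, i.e. the callee profile is rescaled to the
   frequency of this particular call site.  */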
1929 copy_gsi = gsi_start_bb (copy_basic_block);
1931 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1933 gimple_seq stmts;
1934 gimple *stmt = gsi_stmt (gsi);
1935 gimple *orig_stmt = stmt;
1936 gimple_stmt_iterator stmts_gsi;
1937 bool stmt_added = false;
1939 id->regimplify = false;
1940 stmts = remap_gimple_stmt (stmt, id);
1942 if (gimple_seq_empty_p (stmts))
1943 continue;
1945 seq_gsi = copy_gsi;
1947 for (stmts_gsi = gsi_start (stmts);
1948 !gsi_end_p (stmts_gsi); )
1950 stmt = gsi_stmt (stmts_gsi);
1952 /* Advance iterator now before stmt is moved to seq_gsi. */
1953 gsi_next (&stmts_gsi);
1955 if (gimple_nop_p (stmt))
1956 continue;
1958 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1959 orig_stmt);
1961 /* With return slot optimization we can end up with
1962 non-gimple (foo *)&this->m; fix that here. */
1963 if (is_gimple_assign (stmt)
1964 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1965 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1967 tree new_rhs;
1968 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1969 gimple_assign_rhs1 (stmt),
1970 true, NULL, false,
1971 GSI_CONTINUE_LINKING);
1972 gimple_assign_set_rhs1 (stmt, new_rhs);
1973 id->regimplify = false;
1976 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1978 if (id->regimplify)
1979 gimple_regimplify_operands (stmt, &seq_gsi);
1981 stmt_added = true;
1984 if (!stmt_added)
1985 continue;
1987 /* If copy_basic_block has been empty at the start of this iteration,
1988 call gsi_start_bb again to get at the newly added statements. */
1989 if (gsi_end_p (copy_gsi))
1990 copy_gsi = gsi_start_bb (copy_basic_block);
1991 else
1992 gsi_next (&copy_gsi);
1994 /* Process the new statement. The call to gimple_regimplify_operands
1995 possibly turned the statement into multiple statements; we
1996 need to process all of them. */
1999 tree fn;
2000 gcall *call_stmt;
2002 stmt = gsi_stmt (copy_gsi);
2003 call_stmt = dyn_cast <gcall *> (stmt);
2004 if (call_stmt
2005 && gimple_call_va_arg_pack_p (call_stmt)
2006 && id->call_stmt
2007 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2009 /* __builtin_va_arg_pack () should be replaced by
2010 all arguments corresponding to ... in the caller. */
2011 tree p;
2012 gcall *new_call;
2013 vec<tree> argarray;
2014 size_t nargs = gimple_call_num_args (id->call_stmt);
2015 size_t n;
2017 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2018 nargs--;
2020 /* Create the new array of arguments. */
2021 n = nargs + gimple_call_num_args (call_stmt);
2022 argarray.create (n);
2023 argarray.safe_grow_cleared (n);
2025 /* Copy all the arguments before '...' */
2026 memcpy (argarray.address (),
2027 gimple_call_arg_ptr (call_stmt, 0),
2028 gimple_call_num_args (call_stmt) * sizeof (tree));
2030 /* Append the arguments passed in '...' */
2031 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2032 gimple_call_arg_ptr (id->call_stmt, 0)
2033 + (gimple_call_num_args (id->call_stmt) - nargs),
2034 nargs * sizeof (tree));
2036 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2037 argarray);
2039 argarray.release ();
2041 /* Copy all GIMPLE_CALL flags, location and block, except
2042 GF_CALL_VA_ARG_PACK. */
2043 gimple_call_copy_flags (new_call, call_stmt);
2044 gimple_call_set_va_arg_pack (new_call, false);
2045 /* location includes block. */
2046 gimple_set_location (new_call, gimple_location (stmt));
2047 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2049 gsi_replace (&copy_gsi, new_call, false);
2050 stmt = new_call;
2052 else if (call_stmt
2053 && id->call_stmt
2054 && (decl = gimple_call_fndecl (stmt))
2055 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2057 /* __builtin_va_arg_pack_len () should be replaced by
2058 the number of anonymous arguments. */
2059 size_t nargs = gimple_call_num_args (id->call_stmt);
2060 tree count, p;
2061 gimple *new_stmt;
2063 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2064 nargs--;
2066 if (!gimple_call_lhs (stmt))
2068 /* Drop unused calls. */
2069 gsi_remove (&copy_gsi, false);
2070 continue;
2072 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2074 count = build_int_cst (integer_type_node, nargs);
2075 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2076 gsi_replace (&copy_gsi, new_stmt, false);
2077 stmt = new_stmt;
2079 else if (nargs != 0)
2081 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2082 count = build_int_cst (integer_type_node, nargs);
2083 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2084 PLUS_EXPR, newlhs, count);
2085 gimple_call_set_lhs (stmt, newlhs);
2086 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
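/* A minimal source-level sketch of the two replacements above (the
   functions and arguments are hypothetical):

     extern int do_log (const char *fmt, ...);

     static inline __attribute__ ((always_inline)) int
     log_wrap (const char *fmt, ...)
     {
       if (__builtin_va_arg_pack_len () == 0)
         return 0;
       return do_log (fmt, __builtin_va_arg_pack ());
     }

     ... log_wrap ("%d %d", 1, 2) ...

   When log_wrap is inlined into this caller, __builtin_va_arg_pack_len ()
   becomes the constant 2 and the do_log call is rebuilt with the caller's
   anonymous arguments appended, i.e. do_log (fmt, 1, 2).  */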
2089 else if (call_stmt
2090 && id->call_stmt
2091 && gimple_call_internal_p (stmt)
2092 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2094 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2095 gsi_remove (&copy_gsi, false);
2096 continue;
2099 /* Statements produced by inlining can be unfolded, especially
2100 when we constant propagated some operands. We can't fold
2101 them right now for two reasons:
2102 1) folding requires SSA_NAME_DEF_STMTs to be correct
2103 2) we can't change function calls to builtins.
2104 So we just mark the statement for later folding. We mark
2105 all new statements, instead of just the statements that changed
2106 by some nontrivial substitution, so that even statements made
2107 foldable indirectly are updated. If this turns out to be
2108 expensive, copy_body can be told to watch for nontrivial
2109 changes. */
2110 if (id->statements_to_fold)
2111 id->statements_to_fold->add (stmt);
2113 /* We're duplicating a CALL_EXPR. Find any corresponding
2114 callgraph edges and update or duplicate them. */
2115 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2117 struct cgraph_edge *edge;
2119 switch (id->transform_call_graph_edges)
2121 case CB_CGE_DUPLICATE:
2122 edge = id->src_node->get_edge (orig_stmt);
2123 if (edge)
2125 struct cgraph_edge *old_edge = edge;
2126 profile_count old_cnt = edge->count;
2127 edge = edge->clone (id->dst_node, call_stmt,
2128 gimple_uid (stmt),
2129 num, den,
2130 true);
2132 /* Speculative calls consist of two edges - direct and
2133 indirect. Duplicate the whole thing and distribute
2134 frequencies accordingly. */
2135 if (edge->speculative)
2137 struct cgraph_edge *direct, *indirect;
2138 struct ipa_ref *ref;
2140 gcc_assert (!edge->indirect_unknown_callee);
2141 old_edge->speculative_call_info (direct, indirect, ref);
2143 profile_count indir_cnt = indirect->count;
2144 indirect = indirect->clone (id->dst_node, call_stmt,
2145 gimple_uid (stmt),
2146 num, den,
2147 true);
2149 profile_probability prob
2150 = indir_cnt.probability_in (old_cnt + indir_cnt);
2151 indirect->count
2152 = copy_basic_block->count.apply_probability (prob);
2153 edge->count = copy_basic_block->count - indirect->count;
2154 id->dst_node->clone_reference (ref, stmt);
2156 else
2157 edge->count = copy_basic_block->count;
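/* A worked example for the speculative case above (made-up counts): if the
   original direct edge had count 10 and the indirect edge count 30, then
   prob = 30 / (10 + 30) = 0.75; with a copied block count of 20 the cloned
   indirect edge gets 20 * 0.75 = 15 and the cloned direct edge the
   remaining 5.  */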
2159 break;
2161 case CB_CGE_MOVE_CLONES:
2162 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2163 call_stmt);
2164 edge = id->dst_node->get_edge (stmt);
2165 break;
2167 case CB_CGE_MOVE:
2168 edge = id->dst_node->get_edge (orig_stmt);
2169 if (edge)
2170 edge->set_call_stmt (call_stmt);
2171 break;
2173 default:
2174 gcc_unreachable ();
2177 /* Constant propagation on arguments done during inlining
2178 may create a new direct call. Produce an edge for it. */
2179 if ((!edge
2180 || (edge->indirect_inlining_edge
2181 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2182 && id->dst_node->definition
2183 && (fn = gimple_call_fndecl (stmt)) != NULL)
2185 struct cgraph_node *dest = cgraph_node::get_create (fn);
2187 /* We have a missing edge in the callgraph. This can happen
2188 when previous inlining turned an indirect call into a
2189 direct call by constant propagating arguments, or when we are
2190 producing a dead clone (for further cloning). In all
2191 other cases we hit a bug (incorrect node sharing is the
2192 most common reason for missing edges). */
2193 gcc_assert (!dest->definition
2194 || dest->address_taken
2195 || !id->src_node->definition
2196 || !id->dst_node->definition);
2197 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2198 id->dst_node->create_edge_including_clones
2199 (dest, orig_stmt, call_stmt, bb->count,
2200 CIF_ORIGINALLY_INDIRECT_CALL);
2201 else
2202 id->dst_node->create_edge (dest, call_stmt,
2203 bb->count)->inline_failed
2204 = CIF_ORIGINALLY_INDIRECT_CALL;
2205 if (dump_file)
2207 fprintf (dump_file, "Created new direct edge to %s\n",
2208 dest->name ());
2212 notice_special_calls (as_a <gcall *> (stmt));
2215 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2216 id->eh_map, id->eh_lp_nr);
2218 gsi_next (&copy_gsi);
2220 while (!gsi_end_p (copy_gsi));
2222 copy_gsi = gsi_last_bb (copy_basic_block);
2225 return copy_basic_block;
2228 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2229 form is quite easy, since the dominator relationship for the old basic
2230 blocks does not change.
2232 There is, however, an exception: inlining might change the dominator
2233 relation across EH edges from basic blocks within the inlined function
2234 to landing pads in the function we inline into.
2236 The function fills in the PHI_RESULTs of such PHI nodes if they refer
2237 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2238 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2239 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2240 set, which means that there will be no overlapping live ranges
2241 for the underlying symbol.
2243 This might change in the future if we allow redirecting of EH edges and
2244 we might then want to change the way we build the CFG pre-inlining to
2245 include all the possible edges. */
2246 static void
2247 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2248 bool can_throw, bool nonlocal_goto)
2250 edge e;
2251 edge_iterator ei;
2253 FOR_EACH_EDGE (e, ei, bb->succs)
2254 if (!e->dest->aux
2255 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2257 gphi *phi;
2258 gphi_iterator si;
2260 if (!nonlocal_goto)
2261 gcc_assert (e->flags & EDGE_EH);
2263 if (!can_throw)
2264 gcc_assert (!(e->flags & EDGE_EH));
2266 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2268 edge re;
2270 phi = si.phi ();
2272 /* For abnormal goto/call edges the receiver can be the
2273 ENTRY_BLOCK. Do not assert this cannot happen. */
2275 gcc_assert ((e->flags & EDGE_EH)
2276 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2278 re = find_edge (ret_bb, e->dest);
2279 gcc_checking_assert (re);
2280 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2281 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2283 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2284 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2289 /* Insert clobbers for automatic variables of inlined ID->src_fn
2290 function at the start of basic block ID->eh_landing_pad_dest. */
2292 static void
2293 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2295 tree var;
2296 basic_block bb = id->eh_landing_pad_dest;
2297 live_vars_map *vars = NULL;
2298 unsigned int cnt = 0;
2299 unsigned int i;
2300 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2301 if (VAR_P (var)
2302 && !DECL_HARD_REGISTER (var)
2303 && !TREE_THIS_VOLATILE (var)
2304 && !DECL_HAS_VALUE_EXPR_P (var)
2305 && !is_gimple_reg (var)
2306 && auto_var_in_fn_p (var, id->src_fn)
2307 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2309 tree *t = id->decl_map->get (var);
2310 if (!t)
2311 continue;
2312 tree new_var = *t;
2313 if (VAR_P (new_var)
2314 && !DECL_HARD_REGISTER (new_var)
2315 && !TREE_THIS_VOLATILE (new_var)
2316 && !DECL_HAS_VALUE_EXPR_P (new_var)
2317 && !is_gimple_reg (new_var)
2318 && auto_var_in_fn_p (new_var, id->dst_fn))
2320 if (vars == NULL)
2321 vars = new live_vars_map;
2322 vars->put (DECL_UID (var), cnt++);
2325 if (vars == NULL)
2326 return;
2328 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2329 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2330 if (VAR_P (var))
2332 edge e;
2333 edge_iterator ei;
2334 bool needed = false;
2335 unsigned int *v = vars->get (DECL_UID (var));
2336 if (v == NULL)
2337 continue;
2338 FOR_EACH_EDGE (e, ei, bb->preds)
2339 if ((e->flags & EDGE_EH) != 0
2340 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2342 basic_block src_bb = (basic_block) e->src->aux;
2344 if (bitmap_bit_p (&live[src_bb->index], *v))
2346 needed = true;
2347 break;
2350 if (needed)
2352 tree new_var = *id->decl_map->get (var);
2353 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2354 tree clobber = build_clobber (TREE_TYPE (new_var));
2355 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2356 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2359 destroy_live_vars (live);
2360 delete vars;
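/* A rough sketch (illustrative names) of the effect of
   add_clobbers_to_eh_landing_pad: if the inlined body owns an automatic
   aggregate "tmp" that is live across a throwing statement, the landing
   pad in the caller gains

     <landing pad>:
       tmp ={v} {CLOBBER};
       resx 1;

   so that later passes know the inlined variable's storage is dead on the
   exception path and may reuse its stack slot.  */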
2363 /* Copy edges from BB into its copy constructed earlier, scaling the profile
2364 accordingly. Assume the aux pointers point to the copies of each BB.
2365 Return true if any debug stmts are left after a statement that must
2366 end the basic block. */
2368 static bool
2369 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2370 basic_block ret_bb, basic_block abnormal_goto_dest,
2371 copy_body_data *id)
2373 basic_block new_bb = (basic_block) bb->aux;
2374 edge_iterator ei;
2375 edge old_edge;
2376 gimple_stmt_iterator si;
2377 bool need_debug_cleanup = false;
2379 /* Use the indices from the original blocks to create edges for the
2380 new ones. */
2381 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2382 if (!(old_edge->flags & EDGE_EH))
2384 edge new_edge;
2385 int flags = old_edge->flags;
2386 location_t locus = old_edge->goto_locus;
2388 /* Return edges do get a FALLTHRU flag when they get inlined. */
2389 if (old_edge->dest->index == EXIT_BLOCK
2390 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2391 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2392 flags |= EDGE_FALLTHRU;
2394 new_edge
2395 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2396 new_edge->probability = old_edge->probability;
2397 if (!id->reset_location)
2398 new_edge->goto_locus = remap_location (locus, id);
2401 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2402 return false;
2404 /* When doing function splitting, we must decrease the count of the return
2405 block, which was previously reachable from blocks we did not copy. */
2406 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2407 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2408 if (old_edge->src->index != ENTRY_BLOCK
2409 && !old_edge->src->aux)
2410 new_bb->count -= old_edge->count ().apply_scale (num, den);
2412 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2414 gimple *copy_stmt;
2415 bool can_throw, nonlocal_goto;
2417 copy_stmt = gsi_stmt (si);
2418 if (!is_gimple_debug (copy_stmt))
2419 update_stmt (copy_stmt);
2421 /* Do this before the possible split_block. */
2422 gsi_next (&si);
2424 /* If this tree could throw an exception, there are two
2425 cases where we need to add abnormal edge(s): the
2426 tree wasn't in a region and there is a "current
2427 region" in the caller; or the original tree had
2428 EH edges. In both cases split the block after the tree,
2429 and add abnormal edge(s) as needed; we need both
2430 those from the callee and the caller.
2431 We check whether the copy can throw, because the const
2432 propagation can change an INDIRECT_REF which throws
2433 into a COMPONENT_REF which doesn't. If the copy
2434 can throw, the original could also throw. */
2435 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2436 nonlocal_goto
2437 = (stmt_can_make_abnormal_goto (copy_stmt)
2438 && !computed_goto_p (copy_stmt));
2440 if (can_throw || nonlocal_goto)
2442 if (!gsi_end_p (si))
2444 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2445 gsi_next (&si);
2446 if (gsi_end_p (si))
2447 need_debug_cleanup = true;
2449 if (!gsi_end_p (si))
2450 /* Note that bb's predecessor edges aren't necessarily
2451 right at this point; split_block doesn't care. */
2453 edge e = split_block (new_bb, copy_stmt);
2455 new_bb = e->dest;
2456 new_bb->aux = e->src->aux;
2457 si = gsi_start_bb (new_bb);
2461 bool update_probs = false;
2463 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2465 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2466 update_probs = true;
2468 else if (can_throw)
2470 make_eh_edges (copy_stmt);
2471 update_probs = true;
2474 /* EH edges may not match old edges. Copy as much as possible. */
2475 if (update_probs)
2477 edge e;
2478 edge_iterator ei;
2479 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2481 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2482 if ((old_edge->flags & EDGE_EH)
2483 && (e = find_edge (copy_stmt_bb,
2484 (basic_block) old_edge->dest->aux))
2485 && (e->flags & EDGE_EH))
2486 e->probability = old_edge->probability;
2488 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2489 if (e->flags & EDGE_EH)
2491 if (!e->probability.initialized_p ())
2492 e->probability = profile_probability::never ();
2493 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2495 if (id->eh_landing_pad_dest == NULL)
2496 id->eh_landing_pad_dest = e->dest;
2497 else
2498 gcc_assert (id->eh_landing_pad_dest == e->dest);
2504 /* If the call we inline cannot make an abnormal goto, do not add
2505 additional abnormal edges but only retain those already present
2506 in the original function body. */
2507 if (abnormal_goto_dest == NULL)
2508 nonlocal_goto = false;
2509 if (nonlocal_goto)
2511 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2513 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2514 nonlocal_goto = false;
2515 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2516 in OpenMP regions which aren't allowed to be left abnormally.
2517 So, no need to add abnormal edge in that case. */
2518 else if (is_gimple_call (copy_stmt)
2519 && gimple_call_internal_p (copy_stmt)
2520 && (gimple_call_internal_fn (copy_stmt)
2521 == IFN_ABNORMAL_DISPATCHER)
2522 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2523 nonlocal_goto = false;
2524 else
2525 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2526 EDGE_ABNORMAL);
2529 if ((can_throw || nonlocal_goto)
2530 && gimple_in_ssa_p (cfun))
2531 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2532 can_throw, nonlocal_goto);
2534 return need_debug_cleanup;
2537 /* Copy the PHIs. All blocks and edges are copied, some blocks
2538 may have been split and new outgoing EH edges inserted.
2539 BB points to the block of the original function and AUX pointers link
2540 the original and newly copied blocks. */
2542 static void
2543 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2545 basic_block const new_bb = (basic_block) bb->aux;
2546 edge_iterator ei;
2547 gphi *phi;
2548 gphi_iterator si;
2549 edge new_edge;
2550 bool inserted = false;
2552 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2554 tree res, new_res;
2555 gphi *new_phi;
2557 phi = si.phi ();
2558 res = PHI_RESULT (phi);
2559 new_res = res;
2560 if (!virtual_operand_p (res))
2562 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2563 if (EDGE_COUNT (new_bb->preds) == 0)
2565 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2566 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2568 else
2570 new_phi = create_phi_node (new_res, new_bb);
2571 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2573 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2574 bb);
2575 tree arg;
2576 tree new_arg;
2577 edge_iterator ei2;
2578 location_t locus;
2580 /* When doing partial cloning, we allow PHIs on the entry
2581 block as long as all the arguments are the same.
2582 Find any input edge to see the argument to copy. */
2583 if (!old_edge)
2584 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2585 if (!old_edge->src->aux)
2586 break;
2588 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2589 new_arg = arg;
2590 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2591 gcc_assert (new_arg);
2592 /* With return slot optimization we can end up with
2593 non-gimple (foo *)&this->m; fix that here. */
2594 if (TREE_CODE (new_arg) != SSA_NAME
2595 && TREE_CODE (new_arg) != FUNCTION_DECL
2596 && !is_gimple_val (new_arg))
2598 gimple_seq stmts = NULL;
2599 new_arg = force_gimple_operand (new_arg, &stmts, true,
2600 NULL);
2601 gsi_insert_seq_on_edge (new_edge, stmts);
2602 inserted = true;
2604 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2605 if (id->reset_location)
2606 locus = input_location;
2607 else
2608 locus = remap_location (locus, id);
2609 add_phi_arg (new_phi, new_arg, new_edge, locus);
2615 /* Commit the delayed edge insertions. */
2616 if (inserted)
2617 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2618 gsi_commit_one_edge_insert (new_edge, NULL);
2622 /* Wrapper for remap_decl so it can be used as a callback. */
2624 static tree
2625 remap_decl_1 (tree decl, void *data)
2627 return remap_decl (decl, (copy_body_data *) data);
2630 /* Build the struct function and associated data structures for the new clone
2631 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2632 cfun to the function of new_fndecl (and current_function_decl too). */
2634 static void
2635 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2637 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2639 if (!DECL_ARGUMENTS (new_fndecl))
2640 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2641 if (!DECL_RESULT (new_fndecl))
2642 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2644 /* Register specific tree functions. */
2645 gimple_register_cfg_hooks ();
2647 /* Get clean struct function. */
2648 push_struct_function (new_fndecl);
2650 /* We will rebuild these, so just sanity check that they are empty. */
2651 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2652 gcc_assert (cfun->local_decls == NULL);
2653 gcc_assert (cfun->cfg == NULL);
2654 gcc_assert (cfun->decl == new_fndecl);
2656 /* Copy items we preserve during cloning. */
2657 cfun->static_chain_decl = src_cfun->static_chain_decl;
2658 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2659 cfun->function_end_locus = src_cfun->function_end_locus;
2660 cfun->curr_properties = src_cfun->curr_properties;
2661 cfun->last_verified = src_cfun->last_verified;
2662 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2663 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2664 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2665 cfun->stdarg = src_cfun->stdarg;
2666 cfun->after_inlining = src_cfun->after_inlining;
2667 cfun->can_throw_non_call_exceptions
2668 = src_cfun->can_throw_non_call_exceptions;
2669 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2670 cfun->returns_struct = src_cfun->returns_struct;
2671 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2673 init_empty_tree_cfg ();
2675 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2677 profile_count num = count;
2678 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2679 profile_count::adjust_for_ipa_scaling (&num, &den);
2681 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2682 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2683 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2684 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2685 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2686 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2687 if (src_cfun->eh)
2688 init_eh_for_function ();
2690 if (src_cfun->gimple_df)
2692 init_tree_ssa (cfun);
2693 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2694 if (cfun->gimple_df->in_ssa_p)
2695 init_ssa_operands (cfun);
2699 /* Helper function for copy_cfg_body. Move debug stmts from the end
2700 of NEW_BB to the beginning of successor basic blocks when needed. If the
2701 successor has multiple predecessors, reset them, otherwise keep
2702 their value. */
2704 static void
2705 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2707 edge e;
2708 edge_iterator ei;
2709 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2711 if (gsi_end_p (si)
2712 || gsi_one_before_end_p (si)
2713 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2714 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2715 return;
2717 FOR_EACH_EDGE (e, ei, new_bb->succs)
2719 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2720 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2721 while (is_gimple_debug (gsi_stmt (ssi)))
2723 gimple *stmt = gsi_stmt (ssi);
2724 gdebug *new_stmt;
2725 tree var;
2726 tree value;
2728 /* For the last edge move the debug stmts instead of copying
2729 them. */
2730 if (ei_one_before_end_p (ei))
2732 si = ssi;
2733 gsi_prev (&ssi);
2734 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2736 gimple_debug_bind_reset_value (stmt);
2737 gimple_set_location (stmt, UNKNOWN_LOCATION);
2739 gsi_remove (&si, false);
2740 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2741 continue;
2744 if (gimple_debug_bind_p (stmt))
2746 var = gimple_debug_bind_get_var (stmt);
2747 if (single_pred_p (e->dest))
2749 value = gimple_debug_bind_get_value (stmt);
2750 value = unshare_expr (value);
2751 new_stmt = gimple_build_debug_bind (var, value, stmt);
2753 else
2754 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2756 else if (gimple_debug_source_bind_p (stmt))
2758 var = gimple_debug_source_bind_get_var (stmt);
2759 value = gimple_debug_source_bind_get_value (stmt);
2760 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2762 else if (gimple_debug_nonbind_marker_p (stmt))
2763 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2764 else
2765 gcc_unreachable ();
2766 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2767 id->debug_stmts.safe_push (new_stmt);
2768 gsi_prev (&ssi);
2773 /* Make a copy of the sub-loops of SRC_PARENT and place them
2774 as siblings of DEST_PARENT. */
2776 static void
2777 copy_loops (copy_body_data *id,
2778 struct loop *dest_parent, struct loop *src_parent)
2780 struct loop *src_loop = src_parent->inner;
2781 while (src_loop)
2783 if (!id->blocks_to_copy
2784 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2786 struct loop *dest_loop = alloc_loop ();
2788 /* Assign the new loop its header and latch and associate
2789 those with the new loop. */
2790 dest_loop->header = (basic_block)src_loop->header->aux;
2791 dest_loop->header->loop_father = dest_loop;
2792 if (src_loop->latch != NULL)
2794 dest_loop->latch = (basic_block)src_loop->latch->aux;
2795 dest_loop->latch->loop_father = dest_loop;
2798 /* Copy loop meta-data. */
2799 copy_loop_info (src_loop, dest_loop);
2800 if (dest_loop->unroll)
2801 cfun->has_unroll = true;
2802 if (dest_loop->force_vectorize)
2803 cfun->has_force_vectorize_loops = true;
2804 if (id->src_cfun->last_clique != 0)
2805 dest_loop->owned_clique
2806 = remap_dependence_clique (id,
2807 src_loop->owned_clique
2808 ? src_loop->owned_clique : 1);
2810 /* Finally place it into the loop array and the loop tree. */
2811 place_new_loop (cfun, dest_loop);
2812 flow_loop_tree_node_add (dest_parent, dest_loop);
2814 if (src_loop->simduid)
2816 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2817 cfun->has_simduid_loops = true;
2820 /* Recurse. */
2821 copy_loops (id, dest_loop, src_loop);
2823 src_loop = src_loop->next;
2827 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2829 void
2830 redirect_all_calls (copy_body_data * id, basic_block bb)
2832 gimple_stmt_iterator si;
2833 gimple *last = last_stmt (bb);
2834 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2836 gimple *stmt = gsi_stmt (si);
2837 if (is_gimple_call (stmt))
2839 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2840 if (edge)
2842 edge->redirect_call_stmt_to_callee ();
2843 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2844 gimple_purge_dead_eh_edges (bb);
2850 /* Make a copy of the body of FN so that it can be inserted inline in
2851 another function. Walks FN via CFG, returns new fndecl. */
2853 static tree
2854 copy_cfg_body (copy_body_data * id,
2855 basic_block entry_block_map, basic_block exit_block_map,
2856 basic_block new_entry)
2858 tree callee_fndecl = id->src_fn;
2859 /* Original cfun for the callee, doesn't change. */
2860 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2861 struct function *cfun_to_copy;
2862 basic_block bb;
2863 tree new_fndecl = NULL;
2864 bool need_debug_cleanup = false;
2865 int last;
2866 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2867 profile_count num = entry_block_map->count;
2869 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2871 /* Register specific tree functions. */
2872 gimple_register_cfg_hooks ();
2874 /* If we are inlining just a region of the function, make sure to connect
2875 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can
2876 be part of a loop, we must compute the frequency and probability of
2877 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2878 probabilities of edges incoming from the nonduplicated region. */
2879 if (new_entry)
2881 edge e;
2882 edge_iterator ei;
2883 den = profile_count::zero ();
2885 FOR_EACH_EDGE (e, ei, new_entry->preds)
2886 if (!e->src->aux)
2887 den += e->count ();
2888 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2891 profile_count::adjust_for_ipa_scaling (&num, &den);
2893 /* Must have a CFG here at this point. */
2894 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2895 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2898 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2899 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2900 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2901 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2903 /* Duplicate any exception-handling regions. */
2904 if (cfun->eh)
2905 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2906 remap_decl_1, id);
2908 /* Use aux pointers to map the original blocks to copy. */
2909 FOR_EACH_BB_FN (bb, cfun_to_copy)
2910 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2912 basic_block new_bb = copy_bb (id, bb, num, den);
2913 bb->aux = new_bb;
2914 new_bb->aux = bb;
2915 new_bb->loop_father = entry_block_map->loop_father;
2918 last = last_basic_block_for_fn (cfun);
2920 /* Now that we've duplicated the blocks, duplicate their edges. */
2921 basic_block abnormal_goto_dest = NULL;
2922 if (id->call_stmt
2923 && stmt_can_make_abnormal_goto (id->call_stmt))
2925 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2927 bb = gimple_bb (id->call_stmt);
2928 gsi_next (&gsi);
2929 if (gsi_end_p (gsi))
2930 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2932 FOR_ALL_BB_FN (bb, cfun_to_copy)
2933 if (!id->blocks_to_copy
2934 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2935 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2936 abnormal_goto_dest, id);
2938 if (id->eh_landing_pad_dest)
2940 add_clobbers_to_eh_landing_pad (id);
2941 id->eh_landing_pad_dest = NULL;
2944 if (new_entry)
2946 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2947 EDGE_FALLTHRU);
2948 e->probability = profile_probability::always ();
2951 /* Duplicate the loop tree, if available and wanted. */
2952 if (loops_for_fn (src_cfun) != NULL
2953 && current_loops != NULL)
2955 copy_loops (id, entry_block_map->loop_father,
2956 get_loop (src_cfun, 0));
2957 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2958 loops_state_set (LOOPS_NEED_FIXUP);
2961 /* If the loop tree in the source function needed fixup, mark the
2962 destination loop tree for fixup, too. */
2963 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2964 loops_state_set (LOOPS_NEED_FIXUP);
2966 if (gimple_in_ssa_p (cfun))
2967 FOR_ALL_BB_FN (bb, cfun_to_copy)
2968 if (!id->blocks_to_copy
2969 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2970 copy_phis_for_bb (bb, id);
2972 FOR_ALL_BB_FN (bb, cfun_to_copy)
2973 if (bb->aux)
2975 if (need_debug_cleanup
2976 && bb->index != ENTRY_BLOCK
2977 && bb->index != EXIT_BLOCK)
2978 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2979 /* Update call edge destinations. This cannot be done before loop
2980 info is updated, because we may split basic blocks. */
2981 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2982 && bb->index != ENTRY_BLOCK
2983 && bb->index != EXIT_BLOCK)
2984 redirect_all_calls (id, (basic_block)bb->aux);
2985 ((basic_block)bb->aux)->aux = NULL;
2986 bb->aux = NULL;
2989 /* Zero out AUX fields of newly created blocks during EH edge
2990 insertion. */
2991 for (; last < last_basic_block_for_fn (cfun); last++)
2993 if (need_debug_cleanup)
2994 maybe_move_debug_stmts_to_successors (id,
2995 BASIC_BLOCK_FOR_FN (cfun, last));
2996 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2997 /* Update call edge destinations. This cannot be done before loop
2998 info is updated, because we may split basic blocks. */
2999 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3000 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3002 entry_block_map->aux = NULL;
3003 exit_block_map->aux = NULL;
3005 if (id->eh_map)
3007 delete id->eh_map;
3008 id->eh_map = NULL;
3010 if (id->dependence_map)
3012 delete id->dependence_map;
3013 id->dependence_map = NULL;
3016 return new_fndecl;
3019 /* Copy the debug STMT using ID. We deal with these statements in a
3020 special way: if any variable in their VALUE expression wasn't
3021 remapped yet, we won't remap it, because that would get decl uids
3022 out of sync, causing codegen differences between -g and -g0. If
3023 this arises, we drop the VALUE expression altogether. */
3025 static void
3026 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3028 tree t, *n;
3029 struct walk_stmt_info wi;
3031 if (tree block = gimple_block (stmt))
3033 n = id->decl_map->get (block);
3034 gimple_set_block (stmt, n ? *n : id->block);
3037 if (gimple_debug_nonbind_marker_p (stmt))
3038 return;
3040 /* Remap all the operands in COPY. */
3041 memset (&wi, 0, sizeof (wi));
3042 wi.info = id;
3044 processing_debug_stmt = 1;
3046 if (gimple_debug_source_bind_p (stmt))
3047 t = gimple_debug_source_bind_get_var (stmt);
3048 else if (gimple_debug_bind_p (stmt))
3049 t = gimple_debug_bind_get_var (stmt);
3050 else
3051 gcc_unreachable ();
3053 if (TREE_CODE (t) == PARM_DECL && id->debug_map
3054 && (n = id->debug_map->get (t)))
3056 gcc_assert (VAR_P (*n));
3057 t = *n;
3059 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3060 /* T is a non-localized variable. */;
3061 else
3062 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3064 if (gimple_debug_bind_p (stmt))
3066 gimple_debug_bind_set_var (stmt, t);
3068 if (gimple_debug_bind_has_value_p (stmt))
3069 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3070 remap_gimple_op_r, &wi, NULL);
3072 /* Punt if any decl couldn't be remapped. */
3073 if (processing_debug_stmt < 0)
3074 gimple_debug_bind_reset_value (stmt);
3076 else if (gimple_debug_source_bind_p (stmt))
3078 gimple_debug_source_bind_set_var (stmt, t);
3079 /* When inlining and the source bind refers to one of the optimized
3080 away parameters, change the source bind into a normal debug bind
3081 referring to the corresponding DEBUG_EXPR_DECL that should have
3082 been bound before the call stmt. */
3083 t = gimple_debug_source_bind_get_value (stmt);
3084 if (t != NULL_TREE
3085 && TREE_CODE (t) == PARM_DECL
3086 && id->call_stmt)
3088 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3089 unsigned int i;
3090 if (debug_args != NULL)
3092 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3093 if ((**debug_args)[i] == DECL_ORIGIN (t)
3094 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3096 t = (**debug_args)[i + 1];
3097 stmt->subcode = GIMPLE_DEBUG_BIND;
3098 gimple_debug_bind_set_value (stmt, t);
3099 break;
3103 if (gimple_debug_source_bind_p (stmt))
3104 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3105 remap_gimple_op_r, &wi, NULL);
3108 processing_debug_stmt = 0;
3110 update_stmt (stmt);
3113 /* Process deferred debug stmts. In order to give values better odds
3114 of being successfully remapped, we delay the processing of debug
3115 stmts until all other stmts that might require remapping are
3116 processed. */
3118 static void
3119 copy_debug_stmts (copy_body_data *id)
3121 size_t i;
3122 gdebug *stmt;
3124 if (!id->debug_stmts.exists ())
3125 return;
3127 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3128 copy_debug_stmt (stmt, id);
3130 id->debug_stmts.release ();
3133 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3134 another function. */
3136 static tree
3137 copy_tree_body (copy_body_data *id)
3139 tree fndecl = id->src_fn;
3140 tree body = DECL_SAVED_TREE (fndecl);
3142 walk_tree (&body, copy_tree_body_r, id, NULL);
3144 return body;
3147 /* Make a copy of the body of FN so that it can be inserted inline in
3148 another function. */
3150 static tree
3151 copy_body (copy_body_data *id,
3152 basic_block entry_block_map, basic_block exit_block_map,
3153 basic_block new_entry)
3155 tree fndecl = id->src_fn;
3156 tree body;
3158 /* If this body has a CFG, walk CFG and copy. */
3159 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3160 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3161 new_entry);
3162 copy_debug_stmts (id);
3164 return body;
3167 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3168 defined in function FN, or of a data member thereof. */
3170 static bool
3171 self_inlining_addr_expr (tree value, tree fn)
3173 tree var;
3175 if (TREE_CODE (value) != ADDR_EXPR)
3176 return false;
3178 var = get_base_address (TREE_OPERAND (value, 0));
3180 return var && auto_var_in_fn_p (var, fn);
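/* A minimal sketch (assumed, illustrative code) of the case this predicate
   guards against: during recursive inlining of recurse_example into itself,
   the argument &v is an ADDR_EXPR of a variable that is local to the
   function being inlined, so setup_one_parameter must not propagate it
   directly and instead goes through the remapped copy of v.  */

static int
recurse_example (int *p, int depth)
{
  int v = depth;
  if (depth == 0)
    return *p;
  return recurse_example (&v, depth - 1);  /* self_inlining_addr_expr (&v, fn) holds here.  */
}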
3183 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3184 lexical block and line number information from base_stmt, if given,
3185 or from the last stmt of the block otherwise. */
3187 static gimple *
3188 insert_init_debug_bind (copy_body_data *id,
3189 basic_block bb, tree var, tree value,
3190 gimple *base_stmt)
3192 gimple *note;
3193 gimple_stmt_iterator gsi;
3194 tree tracked_var;
3196 if (!gimple_in_ssa_p (id->src_cfun))
3197 return NULL;
3199 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3200 return NULL;
3202 tracked_var = target_for_debug_bind (var);
3203 if (!tracked_var)
3204 return NULL;
3206 if (bb)
3208 gsi = gsi_last_bb (bb);
3209 if (!base_stmt && !gsi_end_p (gsi))
3210 base_stmt = gsi_stmt (gsi);
3213 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3215 if (bb)
3217 if (!gsi_end_p (gsi))
3218 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3219 else
3220 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3223 return note;
3226 static void
3227 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3229 /* If VAR represents a zero-sized variable, it's possible that the
3230 assignment statement may result in no gimple statements. */
3231 if (init_stmt)
3233 gimple_stmt_iterator si = gsi_last_bb (bb);
3235 /* We can end up with init statements that store to a non-register
3236 from a rhs with a conversion. Handle that here by forcing the
3237 rhs into a temporary. gimple_regimplify_operands is not
3238 prepared to do this for us. */
3239 if (!is_gimple_debug (init_stmt)
3240 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3241 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3242 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3244 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3245 gimple_expr_type (init_stmt),
3246 gimple_assign_rhs1 (init_stmt));
3247 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3248 GSI_NEW_STMT);
3249 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3250 gimple_assign_set_rhs1 (init_stmt, rhs);
3252 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3253 gimple_regimplify_operands (init_stmt, &si);
3255 if (!is_gimple_debug (init_stmt))
3257 tree def = gimple_assign_lhs (init_stmt);
3258 insert_init_debug_bind (id, bb, def, def, init_stmt);
3263 /* Initialize parameter P with VALUE. If needed, produce the init statement
3264 at the end of BB. When BB is NULL, we return the init statement to be
3265 output later. */
3266 static gimple *
3267 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3268 basic_block bb, tree *vars)
3270 gimple *init_stmt = NULL;
3271 tree var;
3272 tree rhs = value;
3273 tree def = (gimple_in_ssa_p (cfun)
3274 ? ssa_default_def (id->src_cfun, p) : NULL);
3276 if (value
3277 && value != error_mark_node
3278 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3280 /* If we can match up types by promotion/demotion do so. */
3281 if (fold_convertible_p (TREE_TYPE (p), value))
3282 rhs = fold_convert (TREE_TYPE (p), value);
3283 else
3285 /* ??? For valid programs we should not end up here.
3286 Still if we end up with truly mismatched types here, fall back
3287 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3288 GIMPLE to the following passes. */
3289 if (!is_gimple_reg_type (TREE_TYPE (value))
3290 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3291 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3292 else
3293 rhs = build_zero_cst (TREE_TYPE (p));
3297 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3298 here since the type of this decl must be visible to the calling
3299 function. */
3300 var = copy_decl_to_var (p, id);
3302 /* Declare this new variable. */
3303 DECL_CHAIN (var) = *vars;
3304 *vars = var;
3306 /* Make gimplifier happy about this variable. */
3307 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3309 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3310 we would not need to create a new variable here at all, if it
3311 weren't for debug info. Still, we can just use the argument
3312 value. */
3313 if (TREE_READONLY (p)
3314 && !TREE_ADDRESSABLE (p)
3315 && value && !TREE_SIDE_EFFECTS (value)
3316 && !def)
3318 /* We may produce non-gimple trees by adding NOPs or introduce
3319 invalid sharing when the operand is not really constant.
3320 It is not a big deal to prohibit constant propagation here, as
3321 we will constant propagate in the DOM1 pass anyway. */
3322 if (is_gimple_min_invariant (value)
3323 && useless_type_conversion_p (TREE_TYPE (p),
3324 TREE_TYPE (value))
3325 /* We have to be very careful about ADDR_EXPR. Make sure
3326 the base variable isn't a local variable of the inlined
3327 function, e.g., when doing recursive inlining, direct or
3328 mutually-recursive or whatever, which is why we don't
3329 just test whether fn == current_function_decl. */
3330 && ! self_inlining_addr_expr (value, fn))
3332 insert_decl_map (id, p, value);
3333 insert_debug_decl_map (id, p, var);
3334 return insert_init_debug_bind (id, bb, var, value, NULL);
3338 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3339 that way, when the PARM_DECL is encountered, it will be
3340 automatically replaced by the VAR_DECL. */
3341 insert_decl_map (id, p, var);
3343 /* Even if P was TREE_READONLY, the new VAR should not be.
3344 In the original code, we would have constructed a
3345 temporary, and then the function body would have never
3346 changed the value of P. However, now, we will be
3347 constructing VAR directly. The constructor body may
3348 change its value multiple times as it is being
3349 constructed. Therefore, it must not be TREE_READONLY;
3350 the back-end assumes that a TREE_READONLY variable is
3351 assigned to only once. */
3352 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3353 TREE_READONLY (var) = 0;
3355 /* If there is no setup required and we are in SSA, take the easy route
3356 replacing all SSA names representing the function parameter by the
3357 SSA name passed to the function.
3359 We need to construct a map for the variable anyway, as it might be used
3360 in different SSA names when the parameter is set in the function.
3362 Do the replacement at -O0 for const arguments replaced by a constant.
3363 This is important for builtin_constant_p and other constructs requiring
3364 a constant argument to be visible in the inlined function body. */
3365 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3366 && (optimize
3367 || (TREE_READONLY (p)
3368 && is_gimple_min_invariant (rhs)))
3369 && (TREE_CODE (rhs) == SSA_NAME
3370 || is_gimple_min_invariant (rhs))
3371 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3373 insert_decl_map (id, def, rhs);
3374 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3377 /* If the value of the argument is never used, don't bother initializing
3378 it. */
3379 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3381 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3382 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3385 /* Initialize this VAR_DECL from the equivalent argument. Convert
3386 the argument to the proper type in case it was promoted. */
3387 if (value)
3389 if (rhs == error_mark_node)
3391 insert_decl_map (id, p, var);
3392 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3395 STRIP_USELESS_TYPE_CONVERSION (rhs);
3397 /* If we are in SSA form properly remap the default definition
3398 or assign to a dummy SSA name if the parameter is unused and
3399 we are not optimizing. */
3400 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3402 if (def)
3404 def = remap_ssa_name (def, id);
3405 init_stmt = gimple_build_assign (def, rhs);
3406 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3407 set_ssa_default_def (cfun, var, NULL);
3409 else if (!optimize)
3411 def = make_ssa_name (var);
3412 init_stmt = gimple_build_assign (def, rhs);
3415 else
3416 init_stmt = gimple_build_assign (var, rhs);
3418 if (bb && init_stmt)
3419 insert_init_stmt (id, bb, init_stmt);
3421 return init_stmt;
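/* An illustrative sketch (assumed code) of why the -O0 replacement above
   matters: with an always_inline callee and a constant argument bound to a
   const parameter, the propagated constant is intended to remain visible in
   the inlined body so that __builtin_constant_p can still fold to 1 without
   optimization.  */

static inline int __attribute__ ((always_inline))
pick_path (const long x)
{
  return __builtin_constant_p (x) ? 1 : 0;
}

static int
pick_path_constant (void)
{
  return pick_path (42);	/* After inlining, x is replaced by 42.  */
}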
3424 /* Generate code to initialize the parameters of the function at the
3425 top of the stack in ID from the GIMPLE_CALL STMT. */
3427 static void
3428 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3429 tree fn, basic_block bb)
3431 tree parms;
3432 size_t i;
3433 tree p;
3434 tree vars = NULL_TREE;
3435 tree static_chain = gimple_call_chain (stmt);
3437 /* Figure out what the parameters are. */
3438 parms = DECL_ARGUMENTS (fn);
3440 /* Loop through the parameter declarations, replacing each with an
3441 equivalent VAR_DECL, appropriately initialized. */
3442 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3444 tree val;
3445 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3446 setup_one_parameter (id, p, val, fn, bb, &vars);
3448 /* After remapping parameters remap their types. This has to be done
3449 in a second loop over all parameters to appropriately remap
3450 variable sized arrays when the size is specified in a
3451 parameter following the array. */
3452 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3454 tree *varp = id->decl_map->get (p);
3455 if (varp && VAR_P (*varp))
3457 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3458 ? ssa_default_def (id->src_cfun, p) : NULL);
3459 tree var = *varp;
3460 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3461 /* Also remap the default definition if it was remapped
3462 to the default definition of the parameter replacement
3463 by the parameter setup. */
3464 if (def)
3466 tree *defp = id->decl_map->get (def);
3467 if (defp
3468 && TREE_CODE (*defp) == SSA_NAME
3469 && SSA_NAME_VAR (*defp) == var)
3470 TREE_TYPE (*defp) = TREE_TYPE (var);
3475 /* Initialize the static chain. */
3476 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3477 gcc_assert (fn != current_function_decl);
3478 if (p)
3480 /* No static chain? Seems like a bug in tree-nested.c. */
3481 gcc_assert (static_chain);
3483 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3486 declare_inline_vars (id->block, vars);
3490 /* Declare a return variable to replace the RESULT_DECL for the
3491 function we are calling. An appropriate DECL_STMT is returned.
3492 The USE_STMT is filled to contain a use of the declaration to
3493 indicate the return value of the function.
3495 RETURN_SLOT, if non-null, is the place where to store the result. It
3496 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3497 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3499 The return value is a (possibly null) value that holds the result
3500 as seen by the caller. */
3502 static tree
3503 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3504 basic_block entry_bb)
3506 tree callee = id->src_fn;
3507 tree result = DECL_RESULT (callee);
3508 tree callee_type = TREE_TYPE (result);
3509 tree caller_type;
3510 tree var, use;
3512 /* Handle type-mismatches in the function declaration return type
3513 vs. the call expression. */
3514 if (modify_dest)
3515 caller_type = TREE_TYPE (modify_dest);
3516 else
3517 caller_type = TREE_TYPE (TREE_TYPE (callee));
3519 /* We don't need to do anything for functions that don't return anything. */
3520 if (VOID_TYPE_P (callee_type))
3521 return NULL_TREE;
3523 /* If there was a return slot, then the return value is the
3524 dereferenced address of that object. */
3525 if (return_slot)
3527 /* The front end shouldn't have used both return_slot and
3528 a modify expression. */
3529 gcc_assert (!modify_dest);
3530 if (DECL_BY_REFERENCE (result))
3532 tree return_slot_addr = build_fold_addr_expr (return_slot);
3533 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3535 /* We are going to construct *&return_slot and we can't do that
3536 for variables believed to be not addressable.
3538 FIXME: This check can possibly match, because values returned
3539 via return slot optimization are not believed to have their address
3540 taken by alias analysis. */
3541 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3542 var = return_slot_addr;
3544 else
3546 var = return_slot;
3547 gcc_assert (TREE_CODE (var) != SSA_NAME);
3548 if (TREE_ADDRESSABLE (result))
3549 mark_addressable (var);
3551 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3552 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3553 && !DECL_GIMPLE_REG_P (result)
3554 && DECL_P (var))
3555 DECL_GIMPLE_REG_P (var) = 0;
3556 use = NULL;
3557 goto done;
3560 /* All types requiring non-trivial constructors should have been handled. */
3561 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3563 /* Attempt to avoid creating a new temporary variable. */
3564 if (modify_dest
3565 && TREE_CODE (modify_dest) != SSA_NAME)
3567 bool use_it = false;
3569 /* We can't use MODIFY_DEST if there's type promotion involved. */
3570 if (!useless_type_conversion_p (callee_type, caller_type))
3571 use_it = false;
3573 /* ??? If we're assigning to a variable sized type, then we must
3574 reuse the destination variable, because we've no good way to
3575 create variable sized temporaries at this point. */
3576 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3577 use_it = true;
3579 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3580 reuse it as the result of the call directly. Don't do this if
3581 it would promote MODIFY_DEST to addressable. */
3582 else if (TREE_ADDRESSABLE (result))
3583 use_it = false;
3584 else
3586 tree base_m = get_base_address (modify_dest);
3588 /* If the base isn't a decl, then it's a pointer, and we don't
3589 know where that's going to go. */
3590 if (!DECL_P (base_m))
3591 use_it = false;
3592 else if (is_global_var (base_m))
3593 use_it = false;
3594 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3595 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3596 && !DECL_GIMPLE_REG_P (result)
3597 && DECL_GIMPLE_REG_P (base_m))
3598 use_it = false;
3599 else if (!TREE_ADDRESSABLE (base_m))
3600 use_it = true;
3603 if (use_it)
3605 var = modify_dest;
3606 use = NULL;
3607 goto done;
3611 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3613 var = copy_result_decl_to_var (result, id);
3614 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3616 /* Do not have the rest of GCC warn about this variable as it should
3617 not be visible to the user. */
3618 TREE_NO_WARNING (var) = 1;
3620 declare_inline_vars (id->block, var);
3622 /* Build the use expr. If the return type of the function was
3623 promoted, convert it back to the expected type. */
3624 use = var;
3625 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3627 /* If we can match up types by promotion/demotion do so. */
3628 if (fold_convertible_p (caller_type, var))
3629 use = fold_convert (caller_type, var);
3630 else
3632 /* ??? For valid programs we should not end up here.
3633 Still if we end up with truly mismatched types here, fall back
3634 to using a MEM_REF to not leak invalid GIMPLE to the following
3635 passes. */
3636 /* Prevent var from being written into SSA form. */
3637 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3638 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3639 DECL_GIMPLE_REG_P (var) = false;
3640 else if (is_gimple_reg_type (TREE_TYPE (var)))
3641 TREE_ADDRESSABLE (var) = true;
3642 use = fold_build2 (MEM_REF, caller_type,
3643 build_fold_addr_expr (var),
3644 build_int_cst (ptr_type_node, 0));
3648 STRIP_USELESS_TYPE_CONVERSION (use);
3650 if (DECL_BY_REFERENCE (result))
3652 TREE_ADDRESSABLE (var) = 1;
3653 var = build_fold_addr_expr (var);
3656 done:
3657 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3658 way, when the RESULT_DECL is encountered, it will be
3659 automatically replaced by the VAR_DECL.
3661 When returning by reference, ensure that RESULT_DECL remaps to
3662 gimple_val. */
3663 if (DECL_BY_REFERENCE (result)
3664 && !is_gimple_val (var))
3666 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3667 insert_decl_map (id, result, temp);
3668 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3669 its default_def SSA_NAME. */
3670 if (gimple_in_ssa_p (id->src_cfun)
3671 && is_gimple_reg (result))
3673 temp = make_ssa_name (temp);
3674 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3676 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3678 else
3679 insert_decl_map (id, result, var);
3681 /* Remember this so we can ignore it in remap_decls. */
3682 id->retvar = var;
3683 return use;
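/* Illustrative example (not part of the original sources): when inlining
   "a = foo ()" and MODIFY_DEST cannot be reused, the code above creates a
   VAR_DECL copied from foo's RESULT_DECL (call it "retval.0", a hypothetical
   name), maps the RESULT_DECL to it in ID->decl_map, and returns it (possibly
   wrapped in a conversion) as USE, so that after the body is copied the call
   can be rewritten as "a = retval.0".  */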
3686 /* Determine if the function can be copied. If so return NULL. If
3687 not return a string describing the reason for failure. */
3689 const char *
3690 copy_forbidden (struct function *fun)
3692 const char *reason = fun->cannot_be_copied_reason;
3694 /* Only examine the function once. */
3695 if (fun->cannot_be_copied_set)
3696 return reason;
3698 /* We cannot copy a function that receives a non-local goto
3699 because we cannot remap the destination label used in the
3700 function that is performing the non-local goto. */
3701 /* ??? Actually, this should be possible, if we work at it.
3702 No doubt there's just a handful of places that simply
3703 assume it doesn't happen and don't substitute properly. */
3704 if (fun->has_nonlocal_label)
3706 reason = G_("function %q+F can never be copied "
3707 "because it receives a non-local goto");
3708 goto fail;
3711 if (fun->has_forced_label_in_static)
3713 reason = G_("function %q+F can never be copied because it saves "
3714 "address of local label in a static variable");
3715 goto fail;
3718 fail:
3719 fun->cannot_be_copied_reason = reason;
3720 fun->cannot_be_copied_set = true;
3721 return reason;
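/* Usage sketch (illustrative only, hypothetical caller): the answer is
   memoized in FUN, so repeated queries just return the cached string.  */
#if 0
  struct function *fun = DECL_STRUCT_FUNCTION (some_fndecl); /* hypothetical decl */
  const char *reason = copy_forbidden (fun);  /* computed once, then cached */
  if (reason != NULL)
    ;  /* e.g. the non-local goto or forced-label diagnostics above */
#endif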
3725 static const char *inline_forbidden_reason;
3727 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3728 iff a function cannot be inlined. Also sets the reason why. */
3730 static tree
3731 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3732 struct walk_stmt_info *wip)
3734 tree fn = (tree) wip->info;
3735 tree t;
3736 gimple *stmt = gsi_stmt (*gsi);
3738 switch (gimple_code (stmt))
3740 case GIMPLE_CALL:
3741 /* Refuse to inline an alloca call unless the user explicitly forced it,
3742 as this may change the program's memory overhead drastically when the
3743 function using alloca is called in a loop. In the GCC present in
3744 SPEC2000, inlining into schedule_block caused it to require 2GB of
3745 RAM instead of 256MB. Don't do so for alloca calls emitted for
3746 VLA objects, as those can't cause unbounded growth (they're always
3747 wrapped inside stack_save/stack_restore regions). */
3748 if (gimple_maybe_alloca_call_p (stmt)
3749 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3750 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3752 inline_forbidden_reason
3753 = G_("function %q+F can never be inlined because it uses "
3754 "alloca (override using the always_inline attribute)");
3755 *handled_ops_p = true;
3756 return fn;
3759 t = gimple_call_fndecl (stmt);
3760 if (t == NULL_TREE)
3761 break;
3763 /* We cannot inline functions that call setjmp. */
3764 if (setjmp_call_p (t))
3766 inline_forbidden_reason
3767 = G_("function %q+F can never be inlined because it uses setjmp");
3768 *handled_ops_p = true;
3769 return t;
3772 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3773 switch (DECL_FUNCTION_CODE (t))
3775 /* We cannot inline functions that take a variable number of
3776 arguments. */
3777 case BUILT_IN_VA_START:
3778 case BUILT_IN_NEXT_ARG:
3779 case BUILT_IN_VA_END:
3780 inline_forbidden_reason
3781 = G_("function %q+F can never be inlined because it "
3782 "uses variable argument lists");
3783 *handled_ops_p = true;
3784 return t;
3786 case BUILT_IN_LONGJMP:
3787 /* We can't inline functions that call __builtin_longjmp at
3788 all. The non-local goto machinery really requires the
3789 destination be in a different function. If we allow the
3790 function calling __builtin_longjmp to be inlined into the
3791 function calling __builtin_setjmp, Things will Go Awry. */
3792 inline_forbidden_reason
3793 = G_("function %q+F can never be inlined because "
3794 "it uses setjmp-longjmp exception handling");
3795 *handled_ops_p = true;
3796 return t;
3798 case BUILT_IN_NONLOCAL_GOTO:
3799 /* Similarly. */
3800 inline_forbidden_reason
3801 = G_("function %q+F can never be inlined because "
3802 "it uses non-local goto");
3803 *handled_ops_p = true;
3804 return t;
3806 case BUILT_IN_RETURN:
3807 case BUILT_IN_APPLY_ARGS:
3808 /* If a __builtin_apply_args caller would be inlined,
3809 it would be saving arguments of the function it has
3810 been inlined into. Similarly __builtin_return would
3811 return from the function the inline has been inlined into. */
3812 inline_forbidden_reason
3813 = G_("function %q+F can never be inlined because "
3814 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3815 *handled_ops_p = true;
3816 return t;
3818 default:
3819 break;
3821 break;
3823 case GIMPLE_GOTO:
3824 t = gimple_goto_dest (stmt);
3826 /* We will not inline a function which uses computed goto. The
3827 addresses of its local labels, which may be tucked into
3828 global storage, are of course not constant across
3829 instantiations, which causes unexpected behavior. */
3830 if (TREE_CODE (t) != LABEL_DECL)
3832 inline_forbidden_reason
3833 = G_("function %q+F can never be inlined "
3834 "because it contains a computed goto");
3835 *handled_ops_p = true;
3836 return t;
3838 break;
3840 default:
3841 break;
3844 *handled_ops_p = false;
3845 return NULL_TREE;
3848 /* Return true if FNDECL is a function that cannot be inlined into
3849 another one. */
3851 static bool
3852 inline_forbidden_p (tree fndecl)
3854 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3855 struct walk_stmt_info wi;
3856 basic_block bb;
3857 bool forbidden_p = false;
3859 /* First check for shared reasons not to copy the code. */
3860 inline_forbidden_reason = copy_forbidden (fun);
3861 if (inline_forbidden_reason != NULL)
3862 return true;
3864 /* Next, walk the statements of the function looking for
3865 constructs we can't handle, or that are non-optimal for inlining. */
3866 hash_set<tree> visited_nodes;
3867 memset (&wi, 0, sizeof (wi));
3868 wi.info = (void *) fndecl;
3869 wi.pset = &visited_nodes;
3871 FOR_EACH_BB_FN (bb, fun)
3873 gimple *ret;
3874 gimple_seq seq = bb_seq (bb);
3875 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3876 forbidden_p = (ret != NULL);
3877 if (forbidden_p)
3878 break;
3881 return forbidden_p;
3884 /* Return false if the function FNDECL cannot be inlined on account of its
3885 attributes, true otherwise. */
3886 static bool
3887 function_attribute_inlinable_p (const_tree fndecl)
3889 if (targetm.attribute_table)
3891 const_tree a;
3893 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3895 const_tree name = TREE_PURPOSE (a);
3896 int i;
3898 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3899 if (is_attribute_p (targetm.attribute_table[i].name, name))
3900 return targetm.function_attribute_inlinable_p (fndecl);
3904 return true;
3907 /* Returns nonzero if FN is a function that does not have any
3908 fundamental inline blocking properties. */
3910 bool
3911 tree_inlinable_function_p (tree fn)
3913 bool inlinable = true;
3914 bool do_warning;
3915 tree always_inline;
3917 /* If we've already decided this function shouldn't be inlined,
3918 there's no need to check again. */
3919 if (DECL_UNINLINABLE (fn))
3920 return false;
3922 /* We only warn for functions declared `inline' by the user. */
3923 do_warning = (warn_inline
3924 && DECL_DECLARED_INLINE_P (fn)
3925 && !DECL_NO_INLINE_WARNING_P (fn)
3926 && !DECL_IN_SYSTEM_HEADER (fn));
3928 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3930 if (flag_no_inline
3931 && always_inline == NULL)
3933 if (do_warning)
3934 warning (OPT_Winline, "function %q+F can never be inlined because it "
3935 "is suppressed using %<-fno-inline%>", fn);
3936 inlinable = false;
3939 else if (!function_attribute_inlinable_p (fn))
3941 if (do_warning)
3942 warning (OPT_Winline, "function %q+F can never be inlined because it "
3943 "uses attributes conflicting with inlining", fn);
3944 inlinable = false;
3947 else if (inline_forbidden_p (fn))
3949 /* See if we should warn about uninlinable functions. Previously,
3950 some of these warnings would be issued while trying to expand
3951 the function inline, but that would cause multiple warnings
3952 about functions that would for example call alloca. But since
3953 this is a property of the function, just one warning is enough.
3954 As a bonus we can now give more details about the reason why a
3955 function is not inlinable. */
3956 if (always_inline)
3957 error (inline_forbidden_reason, fn);
3958 else if (do_warning)
3959 warning (OPT_Winline, inline_forbidden_reason, fn);
3961 inlinable = false;
3964 /* Squirrel away the result so that we don't have to check again. */
3965 DECL_UNINLINABLE (fn) = !inlinable;
3967 return inlinable;
3970 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3971 word size, take a possible memcpy call into account, and return the
3972 cost based on whether we are optimizing for size or speed according to SPEED_P. */
3975 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3977 HOST_WIDE_INT size;
3979 gcc_assert (!VOID_TYPE_P (type));
3981 if (TREE_CODE (type) == VECTOR_TYPE)
3983 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3984 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3985 int orig_mode_size
3986 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3987 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3988 return ((orig_mode_size + simd_mode_size - 1)
3989 / simd_mode_size);
3992 size = int_size_in_bytes (type);
3994 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3995 /* Cost of a memcpy call, 3 arguments and the call. */
3996 return 4;
3997 else
3998 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
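/* Worked example (illustrative; the constants are target-dependent): with
   MOVE_MAX_PIECES == 8 and MOVE_RATIO (speed_p) == 4, a 24-byte structure
   costs (24 + 8 - 1) / 8 = 3 units, while a 40-byte structure exceeds
   8 * 4 = 32 bytes and is charged the flat memcpy cost of 4.  */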
4001 /* Returns cost of operation CODE, according to WEIGHTS */
4003 static int
4004 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4005 tree op1 ATTRIBUTE_UNUSED, tree op2)
4007 switch (code)
4009 /* These are "free" conversions, or their presumed cost
4010 is folded into other operations. */
4011 case RANGE_EXPR:
4012 CASE_CONVERT:
4013 case COMPLEX_EXPR:
4014 case PAREN_EXPR:
4015 case VIEW_CONVERT_EXPR:
4016 return 0;
4018 /* Assign cost of 1 to usual operations.
4019 ??? We may consider mapping RTL costs to this. */
4020 case COND_EXPR:
4021 case VEC_COND_EXPR:
4022 case VEC_PERM_EXPR:
4024 case PLUS_EXPR:
4025 case POINTER_PLUS_EXPR:
4026 case POINTER_DIFF_EXPR:
4027 case MINUS_EXPR:
4028 case MULT_EXPR:
4029 case MULT_HIGHPART_EXPR:
4031 case ADDR_SPACE_CONVERT_EXPR:
4032 case FIXED_CONVERT_EXPR:
4033 case FIX_TRUNC_EXPR:
4035 case NEGATE_EXPR:
4036 case FLOAT_EXPR:
4037 case MIN_EXPR:
4038 case MAX_EXPR:
4039 case ABS_EXPR:
4040 case ABSU_EXPR:
4042 case LSHIFT_EXPR:
4043 case RSHIFT_EXPR:
4044 case LROTATE_EXPR:
4045 case RROTATE_EXPR:
4047 case BIT_IOR_EXPR:
4048 case BIT_XOR_EXPR:
4049 case BIT_AND_EXPR:
4050 case BIT_NOT_EXPR:
4052 case TRUTH_ANDIF_EXPR:
4053 case TRUTH_ORIF_EXPR:
4054 case TRUTH_AND_EXPR:
4055 case TRUTH_OR_EXPR:
4056 case TRUTH_XOR_EXPR:
4057 case TRUTH_NOT_EXPR:
4059 case LT_EXPR:
4060 case LE_EXPR:
4061 case GT_EXPR:
4062 case GE_EXPR:
4063 case EQ_EXPR:
4064 case NE_EXPR:
4065 case ORDERED_EXPR:
4066 case UNORDERED_EXPR:
4068 case UNLT_EXPR:
4069 case UNLE_EXPR:
4070 case UNGT_EXPR:
4071 case UNGE_EXPR:
4072 case UNEQ_EXPR:
4073 case LTGT_EXPR:
4075 case CONJ_EXPR:
4077 case PREDECREMENT_EXPR:
4078 case PREINCREMENT_EXPR:
4079 case POSTDECREMENT_EXPR:
4080 case POSTINCREMENT_EXPR:
4082 case REALIGN_LOAD_EXPR:
4084 case WIDEN_SUM_EXPR:
4085 case WIDEN_MULT_EXPR:
4086 case DOT_PROD_EXPR:
4087 case SAD_EXPR:
4088 case WIDEN_MULT_PLUS_EXPR:
4089 case WIDEN_MULT_MINUS_EXPR:
4090 case WIDEN_LSHIFT_EXPR:
4092 case VEC_WIDEN_MULT_HI_EXPR:
4093 case VEC_WIDEN_MULT_LO_EXPR:
4094 case VEC_WIDEN_MULT_EVEN_EXPR:
4095 case VEC_WIDEN_MULT_ODD_EXPR:
4096 case VEC_UNPACK_HI_EXPR:
4097 case VEC_UNPACK_LO_EXPR:
4098 case VEC_UNPACK_FLOAT_HI_EXPR:
4099 case VEC_UNPACK_FLOAT_LO_EXPR:
4100 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4101 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4102 case VEC_PACK_TRUNC_EXPR:
4103 case VEC_PACK_SAT_EXPR:
4104 case VEC_PACK_FIX_TRUNC_EXPR:
4105 case VEC_PACK_FLOAT_EXPR:
4106 case VEC_WIDEN_LSHIFT_HI_EXPR:
4107 case VEC_WIDEN_LSHIFT_LO_EXPR:
4108 case VEC_DUPLICATE_EXPR:
4109 case VEC_SERIES_EXPR:
4111 return 1;
4113 /* A few special cases of expensive operations. This is useful
4114 to avoid inlining functions that have too many of these. */
4115 case TRUNC_DIV_EXPR:
4116 case CEIL_DIV_EXPR:
4117 case FLOOR_DIV_EXPR:
4118 case ROUND_DIV_EXPR:
4119 case EXACT_DIV_EXPR:
4120 case TRUNC_MOD_EXPR:
4121 case CEIL_MOD_EXPR:
4122 case FLOOR_MOD_EXPR:
4123 case ROUND_MOD_EXPR:
4124 case RDIV_EXPR:
4125 if (TREE_CODE (op2) != INTEGER_CST)
4126 return weights->div_mod_cost;
4127 return 1;
4129 /* Bit-field insertion needs several shift and mask operations. */
4130 case BIT_INSERT_EXPR:
4131 return 3;
4133 default:
4134 /* We expect a copy assignment with no operator. */
4135 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4136 return 0;
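/* Illustrative summary (not from the original sources): conversions and
   PAREN_EXPR are free, most arithmetic and comparison codes cost 1,
   a division or modulo by a non-constant (e.g. "x / y") costs
   WEIGHTS->div_mod_cost while "x / 4" stays at 1, and BIT_INSERT_EXPR
   costs 3 for its shift-and-mask expansion.  */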
4141 /* Estimate number of instructions that will be created by expanding
4142 the statements in the statement sequence STMTS.
4143 WEIGHTS contains weights attributed to various constructs. */
4146 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4148 int cost;
4149 gimple_stmt_iterator gsi;
4151 cost = 0;
4152 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4153 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4155 return cost;
4159 /* Estimate number of instructions that will be created by expanding STMT.
4160 WEIGHTS contains weights attributed to various constructs. */
4163 estimate_num_insns (gimple *stmt, eni_weights *weights)
4165 unsigned cost, i;
4166 enum gimple_code code = gimple_code (stmt);
4167 tree lhs;
4168 tree rhs;
4170 switch (code)
4172 case GIMPLE_ASSIGN:
4173 /* Try to estimate the cost of assignments. We have two cases to
4174 deal with:
4175 1) Simple assignments to registers;
4176 2) Stores to things that must live in memory. This includes
4177 "normal" stores to scalars, but also assignments of large
4178 structures, or constructors of big arrays;
4180 Let us look at these two cases, assuming we have "a = b + C":
4181 <GIMPLE_ASSIGN <var_decl "a">
4182 <plus_expr <var_decl "b"> <constant C>>
4183 If "a" is a GIMPLE register, the assignment to it is free on almost
4184 any target, because "a" usually ends up in a real register. Hence
4185 the only cost of this expression comes from the PLUS_EXPR, and we
4186 can ignore the GIMPLE_ASSIGN.
4187 If "a" is not a GIMPLE register, the assignment to "a" will most
4188 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4189 of moving something into "a", which we compute using the function
4190 estimate_move_cost. */
4191 if (gimple_clobber_p (stmt))
4192 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4194 lhs = gimple_assign_lhs (stmt);
4195 rhs = gimple_assign_rhs1 (stmt);
4197 cost = 0;
4199 /* Account for the cost of moving to / from memory. */
4200 if (gimple_store_p (stmt))
4201 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4202 if (gimple_assign_load_p (stmt))
4203 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4205 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4206 gimple_assign_rhs1 (stmt),
4207 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4208 == GIMPLE_BINARY_RHS
4209 ? gimple_assign_rhs2 (stmt) : NULL);
4210 break;
4212 case GIMPLE_COND:
4213 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4214 gimple_op (stmt, 0),
4215 gimple_op (stmt, 1));
4216 break;
4218 case GIMPLE_SWITCH:
4220 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4221 /* Take into account cost of the switch + guess 2 conditional jumps for
4222 each case label.
4224 TODO: once the switch expansion logic is sufficiently separated, we can
4225 do a better job of estimating the cost of the switch. */
4226 if (weights->time_based)
4227 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4228 else
4229 cost = gimple_switch_num_labels (switch_stmt) * 2;
4231 break;
4233 case GIMPLE_CALL:
4235 tree decl;
4237 if (gimple_call_internal_p (stmt))
4238 return 0;
4239 else if ((decl = gimple_call_fndecl (stmt))
4240 && fndecl_built_in_p (decl))
4242 /* Do not special case builtins where we see the body.
4243 This just confuses the inliner. */
4244 struct cgraph_node *node;
4245 if (!(node = cgraph_node::get (decl))
4246 || node->definition)
4248 /* For builtins that are likely expanded to nothing or
4249 inlined, do not account for operand costs. */
4250 else if (is_simple_builtin (decl))
4251 return 0;
4252 else if (is_inexpensive_builtin (decl))
4253 return weights->target_builtin_call_cost;
4254 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4256 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4257 specialize the cheap expansion we do here.
4258 ??? This asks for a more general solution. */
4259 switch (DECL_FUNCTION_CODE (decl))
4261 case BUILT_IN_POW:
4262 case BUILT_IN_POWF:
4263 case BUILT_IN_POWL:
4264 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4265 && (real_equal
4266 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4267 &dconst2)))
4268 return estimate_operator_cost
4269 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4270 gimple_call_arg (stmt, 0));
4271 break;
4273 default:
4274 break;
4279 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4280 if (gimple_call_lhs (stmt))
4281 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4282 weights->time_based);
4283 for (i = 0; i < gimple_call_num_args (stmt); i++)
4285 tree arg = gimple_call_arg (stmt, i);
4286 cost += estimate_move_cost (TREE_TYPE (arg),
4287 weights->time_based);
4289 break;
4292 case GIMPLE_RETURN:
4293 return weights->return_cost;
4295 case GIMPLE_GOTO:
4296 case GIMPLE_LABEL:
4297 case GIMPLE_NOP:
4298 case GIMPLE_PHI:
4299 case GIMPLE_PREDICT:
4300 case GIMPLE_DEBUG:
4301 return 0;
4303 case GIMPLE_ASM:
4305 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4306 /* 1000 means infinity. This avoids overflows later
4307 with very long asm statements. */
4308 if (count > 1000)
4309 count = 1000;
4310 /* If this asm is asm inline, count anything as minimum size. */
4311 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4312 count = MIN (1, count);
4313 return MAX (1, count);
4316 case GIMPLE_RESX:
4317 /* This is either going to be an external function call with one
4318 argument, or two register copy statements plus a goto. */
4319 return 2;
4321 case GIMPLE_EH_DISPATCH:
4322 /* ??? This is going to turn into a switch statement. Ideally
4323 we'd have a look at the eh region and estimate the number of
4324 edges involved. */
4325 return 10;
4327 case GIMPLE_BIND:
4328 return estimate_num_insns_seq (
4329 gimple_bind_body (as_a <gbind *> (stmt)),
4330 weights);
4332 case GIMPLE_EH_FILTER:
4333 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4335 case GIMPLE_CATCH:
4336 return estimate_num_insns_seq (gimple_catch_handler (
4337 as_a <gcatch *> (stmt)),
4338 weights);
4340 case GIMPLE_TRY:
4341 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4342 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4344 /* OMP directives are generally very expensive. */
4346 case GIMPLE_OMP_RETURN:
4347 case GIMPLE_OMP_SECTIONS_SWITCH:
4348 case GIMPLE_OMP_ATOMIC_STORE:
4349 case GIMPLE_OMP_CONTINUE:
4350 /* ...except these, which are cheap. */
4351 return 0;
4353 case GIMPLE_OMP_ATOMIC_LOAD:
4354 return weights->omp_cost;
4356 case GIMPLE_OMP_FOR:
4357 return (weights->omp_cost
4358 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4359 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4361 case GIMPLE_OMP_PARALLEL:
4362 case GIMPLE_OMP_TASK:
4363 case GIMPLE_OMP_CRITICAL:
4364 case GIMPLE_OMP_MASTER:
4365 case GIMPLE_OMP_TASKGROUP:
4366 case GIMPLE_OMP_ORDERED:
4367 case GIMPLE_OMP_SECTION:
4368 case GIMPLE_OMP_SECTIONS:
4369 case GIMPLE_OMP_SINGLE:
4370 case GIMPLE_OMP_TARGET:
4371 case GIMPLE_OMP_TEAMS:
4372 return (weights->omp_cost
4373 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4375 case GIMPLE_TRANSACTION:
4376 return (weights->tm_cost
4377 + estimate_num_insns_seq (gimple_transaction_body (
4378 as_a <gtransaction *> (stmt)),
4379 weights));
4381 default:
4382 gcc_unreachable ();
4385 return cost;
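/* Worked example (illustrative): with eni_time_weights as initialized in
   init_inline_once below, a register-only statement "t_1 = x_2 / y_3"
   (y_3 not constant) is charged div_mod_cost == 10, while a direct call
   "z = bar (p, q)" is charged call_cost == 10 plus estimate_move_cost
   for the LHS type and for each argument type.  */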
4388 /* Estimate number of instructions that will be created by expanding
4389 function FNDECL. WEIGHTS contains weights attributed to various
4390 constructs. */
4393 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4395 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4396 gimple_stmt_iterator bsi;
4397 basic_block bb;
4398 int n = 0;
4400 gcc_assert (my_function && my_function->cfg);
4401 FOR_EACH_BB_FN (bb, my_function)
4403 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4404 n += estimate_num_insns (gsi_stmt (bsi), weights);
4407 return n;
4411 /* Initializes weights used by estimate_num_insns. */
4413 void
4414 init_inline_once (void)
4416 eni_size_weights.call_cost = 1;
4417 eni_size_weights.indirect_call_cost = 3;
4418 eni_size_weights.target_builtin_call_cost = 1;
4419 eni_size_weights.div_mod_cost = 1;
4420 eni_size_weights.omp_cost = 40;
4421 eni_size_weights.tm_cost = 10;
4422 eni_size_weights.time_based = false;
4423 eni_size_weights.return_cost = 1;
4425 /* Estimating time for call is difficult, since we have no idea what the
4426 called function does. In the current uses of eni_time_weights,
4427 underestimating the cost does less harm than overestimating it, so
4428 we choose a rather small value here. */
4429 eni_time_weights.call_cost = 10;
4430 eni_time_weights.indirect_call_cost = 15;
4431 eni_time_weights.target_builtin_call_cost = 1;
4432 eni_time_weights.div_mod_cost = 10;
4433 eni_time_weights.omp_cost = 40;
4434 eni_time_weights.tm_cost = 40;
4435 eni_time_weights.time_based = true;
4436 eni_time_weights.return_cost = 2;
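/* Usage sketch (illustrative only; FNDECL is a hypothetical FUNCTION_DECL
   that already has a CFG): size and time estimates are obtained by passing
   the corresponding weight set to the estimators above.  */
#if 0
  int size_estimate = estimate_num_insns_fn (fndecl, &eni_size_weights);
  int time_estimate = estimate_num_insns_fn (fndecl, &eni_time_weights);
#endif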
4440 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4442 static void
4443 prepend_lexical_block (tree current_block, tree new_block)
4445 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4446 BLOCK_SUBBLOCKS (current_block) = new_block;
4447 BLOCK_SUPERCONTEXT (new_block) = current_block;
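/* For example (illustrative): if CURRENT_BLOCK already has subblocks
   B1 -> B2, then after prepend_lexical_block (CURRENT_BLOCK, N) the
   subblock chain reads N -> B1 -> B2 and BLOCK_SUPERCONTEXT (N) is
   CURRENT_BLOCK.  */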
4450 /* Add local variables from CALLEE to CALLER. */
4452 static inline void
4453 add_local_variables (struct function *callee, struct function *caller,
4454 copy_body_data *id)
4456 tree var;
4457 unsigned ix;
4459 FOR_EACH_LOCAL_DECL (callee, ix, var)
4460 if (!can_be_nonlocal (var, id))
4462 tree new_var = remap_decl (var, id);
4464 /* Remap debug-expressions. */
4465 if (VAR_P (new_var)
4466 && DECL_HAS_DEBUG_EXPR_P (var)
4467 && new_var != var)
4469 tree tem = DECL_DEBUG_EXPR (var);
4470 bool old_regimplify = id->regimplify;
4471 id->remapping_type_depth++;
4472 walk_tree (&tem, copy_tree_body_r, id, NULL);
4473 id->remapping_type_depth--;
4474 id->regimplify = old_regimplify;
4475 SET_DECL_DEBUG_EXPR (new_var, tem);
4476 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4478 add_local_decl (caller, new_var);
4482 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4483 have brought in or introduced any debug stmts for SRCVAR. */
4485 static inline void
4486 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4488 tree *remappedvarp = id->decl_map->get (srcvar);
4490 if (!remappedvarp)
4491 return;
4493 if (!VAR_P (*remappedvarp))
4494 return;
4496 if (*remappedvarp == id->retvar)
4497 return;
4499 tree tvar = target_for_debug_bind (*remappedvarp);
4500 if (!tvar)
4501 return;
4503 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4504 id->call_stmt);
4505 gimple_seq_add_stmt (bindings, stmt);
4508 /* For each inlined variable for which we may have debug bind stmts,
4509 add before GSI a final debug stmt resetting it, marking the end of
4510 its life, so that var-tracking knows it doesn't have to compute
4511 further locations for it. */
4513 static inline void
4514 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4516 tree var;
4517 unsigned ix;
4518 gimple_seq bindings = NULL;
4520 if (!gimple_in_ssa_p (id->src_cfun))
4521 return;
4523 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4524 return;
4526 for (var = DECL_ARGUMENTS (id->src_fn);
4527 var; var = DECL_CHAIN (var))
4528 reset_debug_binding (id, var, &bindings);
4530 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4531 reset_debug_binding (id, var, &bindings);
4533 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4536 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4538 static bool
4539 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4541 tree use_retvar;
4542 tree fn;
4543 hash_map<tree, tree> *dst;
4544 hash_map<tree, tree> *st = NULL;
4545 tree return_slot;
4546 tree modify_dest;
4547 struct cgraph_edge *cg_edge;
4548 cgraph_inline_failed_t reason;
4549 basic_block return_block;
4550 edge e;
4551 gimple_stmt_iterator gsi, stmt_gsi;
4552 bool successfully_inlined = false;
4553 bool purge_dead_abnormal_edges;
4554 gcall *call_stmt;
4555 unsigned int prop_mask, src_properties;
4556 struct function *dst_cfun;
4557 tree simduid;
4558 use_operand_p use;
4559 gimple *simtenter_stmt = NULL;
4560 vec<tree> *simtvars_save;
4562 /* The gimplifier uses input_location in too many places, such as
4563 internal_get_tmp_var (). */
4564 location_t saved_location = input_location;
4565 input_location = gimple_location (stmt);
4567 /* From here on, we're only interested in CALL_EXPRs. */
4568 call_stmt = dyn_cast <gcall *> (stmt);
4569 if (!call_stmt)
4570 goto egress;
4572 cg_edge = id->dst_node->get_edge (stmt);
4573 gcc_checking_assert (cg_edge);
4574 /* First, see if we can figure out what function is being called.
4575 If we cannot, then there is no hope of inlining the function. */
4576 if (cg_edge->indirect_unknown_callee)
4577 goto egress;
4578 fn = cg_edge->callee->decl;
4579 gcc_checking_assert (fn);
4581 /* If FN is a declaration of a function in a nested scope that was
4582 globally declared inline, we don't set its DECL_INITIAL.
4583 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4584 C++ front-end uses it for cdtors to refer to their internal
4585 declarations, that are not real functions. Fortunately those
4586 don't have trees to be saved, so we can tell by checking their
4587 gimple_body. */
4588 if (!DECL_INITIAL (fn)
4589 && DECL_ABSTRACT_ORIGIN (fn)
4590 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4591 fn = DECL_ABSTRACT_ORIGIN (fn);
4593 /* Don't try to inline functions that are not well-suited to inlining. */
4594 if (cg_edge->inline_failed)
4596 reason = cg_edge->inline_failed;
4597 /* If this call was originally indirect, we do not want to emit any
4598 inlining related warnings or sorry messages because there are no
4599 guarantees regarding those. */
4600 if (cg_edge->indirect_inlining_edge)
4601 goto egress;
4603 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4604 /* For extern inline functions that get redefined we always
4605 silently ignored the always_inline flag. Better behavior would
4606 be to be able to keep both bodies and use the extern inline body
4607 for inlining, but we can't do that because frontends overwrite
4608 the body. */
4609 && !cg_edge->callee->local.redefined_extern_inline
4610 /* During early inline pass, report only when optimization is
4611 not turned on. */
4612 && (symtab->global_info_ready
4613 || !optimize
4614 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4615 /* PR 20090218-1_0.c. Body can be provided by another module. */
4616 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4618 error ("inlining failed in call to always_inline %q+F: %s", fn,
4619 cgraph_inline_failed_string (reason));
4620 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4621 inform (gimple_location (stmt), "called from here");
4622 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4623 inform (DECL_SOURCE_LOCATION (cfun->decl),
4624 "called from this function");
4626 else if (warn_inline
4627 && DECL_DECLARED_INLINE_P (fn)
4628 && !DECL_NO_INLINE_WARNING_P (fn)
4629 && !DECL_IN_SYSTEM_HEADER (fn)
4630 && reason != CIF_UNSPECIFIED
4631 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4632 /* Do not warn about not inlined recursive calls. */
4633 && !cg_edge->recursive_p ()
4634 /* Avoid warnings during early inline pass. */
4635 && symtab->global_info_ready)
4637 auto_diagnostic_group d;
4638 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4639 fn, _(cgraph_inline_failed_string (reason))))
4641 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4642 inform (gimple_location (stmt), "called from here");
4643 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4644 inform (DECL_SOURCE_LOCATION (cfun->decl),
4645 "called from this function");
4648 goto egress;
4650 id->src_node = cg_edge->callee;
4652 /* If the callee is a thunk, all we need is to adjust the THIS pointer
4653 and redirect to the function being thunked. */
4654 if (id->src_node->thunk.thunk_p)
4656 cgraph_edge *edge;
4657 tree virtual_offset = NULL;
4658 profile_count count = cg_edge->count;
4659 tree op;
4660 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4662 cg_edge->remove ();
4663 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4664 gimple_uid (stmt),
4665 profile_count::one (),
4666 profile_count::one (),
4667 true);
4668 edge->count = count;
4669 if (id->src_node->thunk.virtual_offset_p)
4670 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4671 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4672 NULL);
4673 gsi_insert_before (&iter, gimple_build_assign (op,
4674 gimple_call_arg (stmt, 0)),
4675 GSI_NEW_STMT);
4676 gcc_assert (id->src_node->thunk.this_adjusting);
4677 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4678 virtual_offset, id->src_node->thunk.indirect_offset);
4680 gimple_call_set_arg (stmt, 0, op);
4681 gimple_call_set_fndecl (stmt, edge->callee->decl);
4682 update_stmt (stmt);
4683 id->src_node->remove ();
4684 expand_call_inline (bb, stmt, id);
4685 maybe_remove_unused_call_args (cfun, stmt);
4686 return true;
4688 fn = cg_edge->callee->decl;
4689 cg_edge->callee->get_untransformed_body ();
4691 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4692 cg_edge->callee->verify ();
4694 /* We will be inlining this callee. */
4695 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4697 /* Update the caller's EH personality. */
4698 if (DECL_FUNCTION_PERSONALITY (fn))
4699 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4700 = DECL_FUNCTION_PERSONALITY (fn);
4702 /* Split the block before the GIMPLE_CALL. */
4703 stmt_gsi = gsi_for_stmt (stmt);
4704 gsi_prev (&stmt_gsi);
4705 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4706 bb = e->src;
4707 return_block = e->dest;
4708 remove_edge (e);
4710 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4711 been the source of abnormal edges. In this case, schedule
4712 the removal of dead abnormal edges. */
4713 gsi = gsi_start_bb (return_block);
4714 gsi_next (&gsi);
4715 purge_dead_abnormal_edges = gsi_end_p (gsi);
4717 stmt_gsi = gsi_start_bb (return_block);
4719 /* Build a block containing code to initialize the arguments, the
4720 actual inline expansion of the body, and a label for the return
4721 statements within the function to jump to. The type of the
4722 statement expression is the return type of the function call.
4723 ??? If the call does not have an associated block then we will
4724 remap all callee blocks to NULL, effectively dropping most of
4725 its debug information. This should only happen for calls to
4726 artificial decls inserted by the compiler itself. We need to
4727 either link the inlined blocks into the caller block tree or
4728 not refer to them in any way to not break GC for locations. */
4729 if (tree block = gimple_block (stmt))
4731 /* We do want to assign a non-UNKNOWN_LOCATION BLOCK_SOURCE_LOCATION
4732 to make inlined_function_outer_scope_p return true on this BLOCK. */
4733 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4734 if (loc == UNKNOWN_LOCATION)
4735 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4736 if (loc == UNKNOWN_LOCATION)
4737 loc = BUILTINS_LOCATION;
4738 id->block = make_node (BLOCK);
4739 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4740 BLOCK_SOURCE_LOCATION (id->block) = loc;
4741 prepend_lexical_block (block, id->block);
4744 /* Local declarations will be replaced by their equivalents in this map. */
4745 st = id->decl_map;
4746 id->decl_map = new hash_map<tree, tree>;
4747 dst = id->debug_map;
4748 id->debug_map = NULL;
4749 if (flag_stack_reuse != SR_NONE)
4750 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4752 /* Record the function we are about to inline. */
4753 id->src_fn = fn;
4754 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4755 id->reset_location = DECL_IGNORED_P (fn);
4756 id->call_stmt = call_stmt;
4758 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4759 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4760 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4761 simtvars_save = id->dst_simt_vars;
4762 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4763 && (simduid = bb->loop_father->simduid) != NULL_TREE
4764 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4765 && single_imm_use (simduid, &use, &simtenter_stmt)
4766 && is_gimple_call (simtenter_stmt)
4767 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4768 vec_alloc (id->dst_simt_vars, 0);
4769 else
4770 id->dst_simt_vars = NULL;
4772 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4773 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4775 /* If the src function contains an IFN_VA_ARG, then so will the dst
4776 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4777 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4778 src_properties = id->src_cfun->curr_properties & prop_mask;
4779 if (src_properties != prop_mask)
4780 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4782 gcc_assert (!id->src_cfun->after_inlining);
4784 id->entry_bb = bb;
4785 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4787 gimple_stmt_iterator si = gsi_last_bb (bb);
4788 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4789 NOT_TAKEN),
4790 GSI_NEW_STMT);
4792 initialize_inlined_parameters (id, stmt, fn, bb);
4793 if (debug_nonbind_markers_p && debug_inline_points && id->block
4794 && inlined_function_outer_scope_p (id->block))
4796 gimple_stmt_iterator si = gsi_last_bb (bb);
4797 gsi_insert_after (&si, gimple_build_debug_inline_entry
4798 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4799 GSI_NEW_STMT);
4802 if (DECL_INITIAL (fn))
4804 if (gimple_block (stmt))
4806 tree *var;
4808 prepend_lexical_block (id->block,
4809 remap_blocks (DECL_INITIAL (fn), id));
4810 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4811 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4812 == NULL_TREE));
4813 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4814 otherwise, for DWARF, the DW_TAG_formal_parameter DIEs will not be children of
4815 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4816 under it. The parameters can then be evaluated in the debugger,
4817 but don't show in backtraces. */
4818 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4819 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4821 tree v = *var;
4822 *var = TREE_CHAIN (v);
4823 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4824 BLOCK_VARS (id->block) = v;
4826 else
4827 var = &TREE_CHAIN (*var);
4829 else
4830 remap_blocks_to_null (DECL_INITIAL (fn), id);
4833 /* Return statements in the function body will be replaced by jumps
4834 to the RET_LABEL. */
4835 gcc_assert (DECL_INITIAL (fn));
4836 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4838 /* Find the LHS to which the result of this call is assigned. */
4839 return_slot = NULL;
4840 if (gimple_call_lhs (stmt))
4842 modify_dest = gimple_call_lhs (stmt);
4844 /* The function which we are inlining might not return a value,
4845 in which case we should issue a warning that the function
4846 does not return a value. In that case the optimizers will
4847 see that the variable to which the value is assigned was not
4848 initialized. We do not want to issue a warning about that
4849 uninitialized variable. */
4850 if (DECL_P (modify_dest))
4851 TREE_NO_WARNING (modify_dest) = 1;
4853 if (gimple_call_return_slot_opt_p (call_stmt))
4855 return_slot = modify_dest;
4856 modify_dest = NULL;
4859 else
4860 modify_dest = NULL;
4862 /* If we are inlining a call to the C++ operator new, we don't want
4863 to use type based alias analysis on the return value. Otherwise
4864 we may get confused if the compiler sees that the inlined new
4865 function returns a pointer which was just deleted. See bug
4866 33407. */
4867 if (DECL_IS_OPERATOR_NEW (fn))
4869 return_slot = NULL;
4870 modify_dest = NULL;
4873 /* Declare the return variable for the function. */
4874 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4876 /* Add local vars in this inlined callee to caller. */
4877 add_local_variables (id->src_cfun, cfun, id);
4879 if (dump_enabled_p ())
4881 char buf[128];
4882 snprintf (buf, sizeof(buf), "%4.2f",
4883 cg_edge->sreal_frequency ().to_double ());
4884 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
4885 call_stmt,
4886 "Inlining %C to %C with frequency %s\n",
4887 id->src_node, id->dst_node, buf);
4888 if (dump_file && (dump_flags & TDF_DETAILS))
4890 id->src_node->dump (dump_file);
4891 id->dst_node->dump (dump_file);
4895 /* This is it. Duplicate the callee body. Assume callee is
4896 pre-gimplified. Note that we must not alter the caller
4897 function in any way before this point, as this CALL_EXPR may be
4898 a self-referential call; if we're calling ourselves, we need to
4899 duplicate our body before altering anything. */
4900 copy_body (id, bb, return_block, NULL);
4902 reset_debug_bindings (id, stmt_gsi);
4904 if (flag_stack_reuse != SR_NONE)
4905 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4906 if (!TREE_THIS_VOLATILE (p))
4908 tree *varp = id->decl_map->get (p);
4909 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4911 tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4912 gimple *clobber_stmt;
4913 TREE_THIS_VOLATILE (clobber) = 1;
4914 clobber_stmt = gimple_build_assign (*varp, clobber);
4915 gimple_set_location (clobber_stmt, gimple_location (stmt));
4916 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4920 /* Reset the escaped solution. */
4921 if (cfun->gimple_df)
4922 pt_solution_reset (&cfun->gimple_df->escaped);
4924 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4925 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4927 size_t nargs = gimple_call_num_args (simtenter_stmt);
4928 vec<tree> *vars = id->dst_simt_vars;
4929 auto_vec<tree> newargs (nargs + vars->length ());
4930 for (size_t i = 0; i < nargs; i++)
4931 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4932 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4934 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4935 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4937 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4938 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4939 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4940 gsi_replace (&gsi, g, false);
4942 vec_free (id->dst_simt_vars);
4943 id->dst_simt_vars = simtvars_save;
4945 /* Clean up. */
4946 if (id->debug_map)
4948 delete id->debug_map;
4949 id->debug_map = dst;
4951 delete id->decl_map;
4952 id->decl_map = st;
4954 /* Unlink the call's virtual operands before replacing it. */
4955 unlink_stmt_vdef (stmt);
4956 if (gimple_vdef (stmt)
4957 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4958 release_ssa_name (gimple_vdef (stmt));
4960 /* If the inlined function returns a result that we care about,
4961 substitute the GIMPLE_CALL with an assignment of the return
4962 variable to the LHS of the call. That is, if STMT was
4963 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4964 if (use_retvar && gimple_call_lhs (stmt))
4966 gimple *old_stmt = stmt;
4967 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4968 gimple_set_location (stmt, gimple_location (old_stmt));
4969 gsi_replace (&stmt_gsi, stmt, false);
4970 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4971 /* Append a clobber for id->retvar if easily possible. */
4972 if (flag_stack_reuse != SR_NONE
4973 && id->retvar
4974 && VAR_P (id->retvar)
4975 && id->retvar != return_slot
4976 && id->retvar != modify_dest
4977 && !TREE_THIS_VOLATILE (id->retvar)
4978 && !is_gimple_reg (id->retvar)
4979 && !stmt_ends_bb_p (stmt))
4981 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4982 gimple *clobber_stmt;
4983 TREE_THIS_VOLATILE (clobber) = 1;
4984 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4985 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4986 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4989 else
4991 /* Handle the case of inlining a function with no return
4992 statement, which causes the return value to become undefined. */
4993 if (gimple_call_lhs (stmt)
4994 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4996 tree name = gimple_call_lhs (stmt);
4997 tree var = SSA_NAME_VAR (name);
4998 tree def = var ? ssa_default_def (cfun, var) : NULL;
5000 if (def)
5002 /* If the variable is used undefined, make this name
5003 undefined via a move. */
5004 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5005 gsi_replace (&stmt_gsi, stmt, true);
5007 else
5009 if (!var)
5011 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5012 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5014 /* Otherwise make this variable undefined. */
5015 gsi_remove (&stmt_gsi, true);
5016 set_ssa_default_def (cfun, var, name);
5017 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5020 /* Replace with a clobber for id->retvar. */
5021 else if (flag_stack_reuse != SR_NONE
5022 && id->retvar
5023 && VAR_P (id->retvar)
5024 && id->retvar != return_slot
5025 && id->retvar != modify_dest
5026 && !TREE_THIS_VOLATILE (id->retvar)
5027 && !is_gimple_reg (id->retvar))
5029 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
5030 gimple *clobber_stmt;
5031 TREE_THIS_VOLATILE (clobber) = 1;
5032 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5033 gimple_set_location (clobber_stmt, gimple_location (stmt));
5034 gsi_replace (&stmt_gsi, clobber_stmt, false);
5035 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5037 else
5038 gsi_remove (&stmt_gsi, true);
5041 if (purge_dead_abnormal_edges)
5043 gimple_purge_dead_eh_edges (return_block);
5044 gimple_purge_dead_abnormal_call_edges (return_block);
5047 /* If the value of the new expression is ignored, that's OK. We
5048 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5049 the equivalent inlined version either. */
5050 if (is_gimple_assign (stmt))
5052 gcc_assert (gimple_assign_single_p (stmt)
5053 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5054 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5057 id->add_clobbers_to_eh_landing_pads = 0;
5059 /* Output the inlining info for this abstract function, since it has been
5060 inlined. If we don't do this now, we can lose the information about the
5061 variables in the function when the blocks get blown away as soon as we
5062 remove the cgraph node. */
5063 if (gimple_block (stmt))
5064 (*debug_hooks->outlining_inline_function) (fn);
5066 /* Update callgraph if needed. */
5067 cg_edge->callee->remove ();
5069 id->block = NULL_TREE;
5070 id->retvar = NULL_TREE;
5071 successfully_inlined = true;
5073 egress:
5074 input_location = saved_location;
5075 return successfully_inlined;
5078 /* Expand call statements reachable from STMT_P.
5079 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5080 in a MODIFY_EXPR. */
5082 static bool
5083 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
5085 gimple_stmt_iterator gsi;
5086 bool inlined = false;
5088 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5090 gimple *stmt = gsi_stmt (gsi);
5091 gsi_prev (&gsi);
5093 if (is_gimple_call (stmt)
5094 && !gimple_call_internal_p (stmt))
5095 inlined |= expand_call_inline (bb, stmt, id);
5098 return inlined;
5102 /* Walk all basic blocks created after FIRST and try to fold every statement
5103 in the STATEMENTS pointer set. */
5105 static void
5106 fold_marked_statements (int first, hash_set<gimple *> *statements)
5108 for (; first < last_basic_block_for_fn (cfun); first++)
5109 if (BASIC_BLOCK_FOR_FN (cfun, first))
5111 gimple_stmt_iterator gsi;
5113 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5114 !gsi_end_p (gsi);
5115 gsi_next (&gsi))
5116 if (statements->contains (gsi_stmt (gsi)))
5118 gimple *old_stmt = gsi_stmt (gsi);
5119 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
5121 if (old_decl && fndecl_built_in_p (old_decl))
5123 /* Folding builtins can create multiple instructions;
5124 we need to look at all of them. */
5125 gimple_stmt_iterator i2 = gsi;
5126 gsi_prev (&i2);
5127 if (fold_stmt (&gsi))
5129 gimple *new_stmt;
5130 /* If a builtin at the end of a bb folded into nothing,
5131 the following loop won't work. */
5132 if (gsi_end_p (gsi))
5134 cgraph_update_edges_for_call_stmt (old_stmt,
5135 old_decl, NULL);
5136 break;
5138 if (gsi_end_p (i2))
5139 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5140 else
5141 gsi_next (&i2);
5142 while (1)
5144 new_stmt = gsi_stmt (i2);
5145 update_stmt (new_stmt);
5146 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5147 new_stmt);
5149 if (new_stmt == gsi_stmt (gsi))
5151 /* It is okay to check only for the very last
5152 of these statements. If it is a throwing
5153 statement nothing will change. If it isn't,
5154 this can remove EH edges. The only way this
5155 could be wrong is if some intermediate stmts
5156 throw, but not the last one. That would mean
5157 we'd have to split the block, which we can't
5158 do here and we'd lose anyway. And as builtins
5159 probably never throw, this all
5160 is moot anyway. */
5161 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5162 new_stmt))
5163 gimple_purge_dead_eh_edges (
5164 BASIC_BLOCK_FOR_FN (cfun, first));
5165 break;
5167 gsi_next (&i2);
5171 else if (fold_stmt (&gsi))
5173 /* Re-read the statement from GSI as fold_stmt() may
5174 have changed it. */
5175 gimple *new_stmt = gsi_stmt (gsi);
5176 update_stmt (new_stmt);
5178 if (is_gimple_call (old_stmt)
5179 || is_gimple_call (new_stmt))
5180 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5181 new_stmt);
5183 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5184 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
5185 first));
5191 /* Expand calls to inline functions in the body of FN. */
5193 unsigned int
5194 optimize_inline_calls (tree fn)
5196 copy_body_data id;
5197 basic_block bb;
5198 int last = n_basic_blocks_for_fn (cfun);
5199 bool inlined_p = false;
5201 /* Clear out ID. */
5202 memset (&id, 0, sizeof (id));
5204 id.src_node = id.dst_node = cgraph_node::get (fn);
5205 gcc_assert (id.dst_node->definition);
5206 id.dst_fn = fn;
5207 /* Or any functions that aren't finished yet. */
5208 if (current_function_decl)
5209 id.dst_fn = current_function_decl;
5211 id.copy_decl = copy_decl_maybe_to_var;
5212 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5213 id.transform_new_cfg = false;
5214 id.transform_return_to_modify = true;
5215 id.transform_parameter = true;
5216 id.transform_lang_insert_block = NULL;
5217 id.statements_to_fold = new hash_set<gimple *>;
5219 push_gimplify_context ();
5221 /* We make no attempts to keep dominance info up-to-date. */
5222 free_dominance_info (CDI_DOMINATORS);
5223 free_dominance_info (CDI_POST_DOMINATORS);
5225 /* Register specific gimple functions. */
5226 gimple_register_cfg_hooks ();
5228 /* Reach the trees by walking over the CFG, and note the
5229 enclosing basic-blocks in the call edges. */
5230 /* We walk the blocks going forward, because inlined function bodies
5231 will split id->current_basic_block, and the new blocks will
5232 follow it; we'll trudge through them, processing their CALL_EXPRs
5233 along the way. */
5234 FOR_EACH_BB_FN (bb, cfun)
5235 inlined_p |= gimple_expand_calls_inline (bb, &id);
5237 pop_gimplify_context (NULL);
5239 if (flag_checking)
5241 struct cgraph_edge *e;
5243 id.dst_node->verify ();
5245 /* Double check that we inlined everything we are supposed to inline. */
5246 for (e = id.dst_node->callees; e; e = e->next_callee)
5247 gcc_assert (e->inline_failed);
5250 /* Fold queued statements. */
5251 update_max_bb_count ();
5252 fold_marked_statements (last, id.statements_to_fold);
5253 delete id.statements_to_fold;
5255 gcc_assert (!id.debug_stmts.exists ());
5257 /* If we didn't inline into the function there is nothing to do. */
5258 if (!inlined_p)
5259 return 0;
5261 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5262 number_blocks (fn);
5264 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5266 if (flag_checking)
5267 id.dst_node->verify ();
5269 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5270 not possible yet - the IPA passes might make various functions not
5271 throw and they don't care to proactively update local EH info. This is
5272 done later in the fixup_cfg pass that also executes the verification. */
5273 return (TODO_update_ssa
5274 | TODO_cleanup_cfg
5275 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5276 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5277 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5278 ? TODO_rebuild_frequencies : 0));
5281 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5283 tree
5284 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5286 enum tree_code code = TREE_CODE (*tp);
5287 enum tree_code_class cl = TREE_CODE_CLASS (code);
5289 /* We make copies of most nodes. */
5290 if (IS_EXPR_CODE_CLASS (cl)
5291 || code == TREE_LIST
5292 || code == TREE_VEC
5293 || code == TYPE_DECL
5294 || code == OMP_CLAUSE)
5296 /* Because the chain gets clobbered when we make a copy, we save it
5297 here. */
5298 tree chain = NULL_TREE, new_tree;
5300 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5301 chain = TREE_CHAIN (*tp);
5303 /* Copy the node. */
5304 new_tree = copy_node (*tp);
5306 *tp = new_tree;
5308 /* Now, restore the chain, if appropriate. That will cause
5309 walk_tree to walk into the chain as well. */
5310 if (code == PARM_DECL
5311 || code == TREE_LIST
5312 || code == OMP_CLAUSE)
5313 TREE_CHAIN (*tp) = chain;
5315 /* For now, we don't update BLOCKs when we make copies. So, we
5316 have to nullify all BIND_EXPRs. */
5317 if (TREE_CODE (*tp) == BIND_EXPR)
5318 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5320 else if (code == CONSTRUCTOR)
5322 /* CONSTRUCTOR nodes need special handling because
5323 we need to duplicate the vector of elements. */
5324 tree new_tree;
5326 new_tree = copy_node (*tp);
5327 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5328 *tp = new_tree;
5330 else if (code == STATEMENT_LIST)
5331 /* We used to just abort on STATEMENT_LIST, but we can run into them
5332 with statement-expressions (c++/40975). */
5333 copy_statement_list (tp);
5334 else if (TREE_CODE_CLASS (code) == tcc_type)
5335 *walk_subtrees = 0;
5336 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5337 *walk_subtrees = 0;
5338 else if (TREE_CODE_CLASS (code) == tcc_constant)
5339 *walk_subtrees = 0;
5340 return NULL_TREE;
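/* Hypothetical usage sketch: walking an expression with this callback
   yields a deep, unshared copy in which declarations, types and constants
   remain shared, as the code above shows.  SOME_EXPR is illustrative.  */
#if 0
  tree expr_copy = some_expr;
  walk_tree (&expr_copy, copy_tree_r, NULL, NULL);
#endif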
5343 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5344 information indicating to what new SAVE_EXPR this one should be mapped,
5345 use that one. Otherwise, create a new node and enter it in ST. FN is
5346 the function into which the copy will be placed. */
5348 static void
5349 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5351 tree *n;
5352 tree t;
5354 /* See if we already encountered this SAVE_EXPR. */
5355 n = st->get (*tp);
5357 /* If we didn't already remap this SAVE_EXPR, do so now. */
5358 if (!n)
5360 t = copy_node (*tp);
5362 /* Remember this SAVE_EXPR. */
5363 st->put (*tp, t);
5364 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5365 st->put (t, t);
5367 else
5369 /* We've already walked into this SAVE_EXPR; don't do it again. */
5370 *walk_subtrees = 0;
5371 t = *n;
5374 /* Replace this SAVE_EXPR with the copy. */
5375 *tp = t;
5378 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5379 label, copies the declaration and enters it in the splay_tree in DATA (which
5380 is really a 'copy_body_data *'). */
5382 static tree
5383 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5384 bool *handled_ops_p ATTRIBUTE_UNUSED,
5385 struct walk_stmt_info *wi)
5387 copy_body_data *id = (copy_body_data *) wi->info;
5388 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5390 if (stmt)
5392 tree decl = gimple_label_label (stmt);
5394 /* Copy the decl and remember the copy. */
5395 insert_decl_map (id, decl, id->copy_decl (decl, id));
5398 return NULL_TREE;
5401 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5402 struct walk_stmt_info *wi);
5404 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5405 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5406 remaps all local declarations to appropriate replacements in gimple
5407 operands. */
5409 static tree
5410 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5412 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5413 copy_body_data *id = (copy_body_data *) wi->info;
5414 hash_map<tree, tree> *st = id->decl_map;
5415 tree *n;
5416 tree expr = *tp;
5418 /* For recursive invocations this is no longer the LHS itself. */
5419 bool is_lhs = wi->is_lhs;
5420 wi->is_lhs = false;
5422 if (TREE_CODE (expr) == SSA_NAME)
5424 *tp = remap_ssa_name (*tp, id);
5425 *walk_subtrees = 0;
5426 if (is_lhs)
5427 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5429 /* Only a local declaration (variable or label). */
5430 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5431 || TREE_CODE (expr) == LABEL_DECL)
5433 /* Lookup the declaration. */
5434 n = st->get (expr);
5436 /* If it's there, remap it. */
5437 if (n)
5438 *tp = *n;
5439 *walk_subtrees = 0;
5441 else if (TREE_CODE (expr) == STATEMENT_LIST
5442 || TREE_CODE (expr) == BIND_EXPR
5443 || TREE_CODE (expr) == SAVE_EXPR)
5444 gcc_unreachable ();
5445 else if (TREE_CODE (expr) == TARGET_EXPR)
5447 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5448 It's OK for this to happen if it was part of a subtree that
5449 isn't immediately expanded, such as operand 2 of another
5450 TARGET_EXPR. */
5451 if (!TREE_OPERAND (expr, 1))
5453 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5454 TREE_OPERAND (expr, 3) = NULL_TREE;
5457 else if (TREE_CODE (expr) == OMP_CLAUSE)
5459 /* Before the omplower pass completes, some OMP clauses can contain
5460 sequences that are neither copied by gimple_seq_copy nor walked by
5461 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5462 in those situations, we have to copy and process them explicitly. */
5464 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5466 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5467 seq = duplicate_remap_omp_clause_seq (seq, wi);
5468 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5470 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5472 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5473 seq = duplicate_remap_omp_clause_seq (seq, wi);
5474 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5476 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5478 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5479 seq = duplicate_remap_omp_clause_seq (seq, wi);
5480 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5481 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5482 seq = duplicate_remap_omp_clause_seq (seq, wi);
5483 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5487 /* Keep iterating. */
5488 return NULL_TREE;
5492 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5493 Using the decl map of the 'copy_body_data *' carried in the
5494 walk_stmt_info WI, remaps all local declarations to appropriate
5495 replacements in gimple statements. */
5497 static tree
5498 replace_locals_stmt (gimple_stmt_iterator *gsip,
5499 bool *handled_ops_p ATTRIBUTE_UNUSED,
5500 struct walk_stmt_info *wi)
5502 copy_body_data *id = (copy_body_data *) wi->info;
5503 gimple *gs = gsi_stmt (*gsip);
5505 if (gbind *stmt = dyn_cast <gbind *> (gs))
5507 tree block = gimple_bind_block (stmt);
5509 if (block)
5511 remap_block (&block, id);
5512 gimple_bind_set_block (stmt, block);
5515 /* This will remap a lot of the same decls again, but this should be
5516 harmless. */
5517 if (gimple_bind_vars (stmt))
5519 tree old_var, decls = gimple_bind_vars (stmt);
5521 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5522 if (!can_be_nonlocal (old_var, id)
5523 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5524 remap_decl (old_var, id);
5526 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5527 id->prevent_decl_creation_for_types = true;
5528 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5529 id->prevent_decl_creation_for_types = false;
5533 /* Keep iterating. */
5534 return NULL_TREE;
5537 /* Create a copy of SEQ and remap all decls in it. */
5539 static gimple_seq
5540 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5542 if (!seq)
5543 return NULL;
5545 /* If there are any labels in OMP sequences, they can only be referred to from
5546 within the sequence itself, so we can do both the marking and the remapping here. */
5547 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5548 gimple_seq copy = gimple_seq_copy (seq);
5549 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5550 return copy;
5553 /* Copies everything in SEQ and replaces variables and labels local to
5554 current_function_decl. */
5556 gimple_seq
5557 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5559 copy_body_data id;
5560 struct walk_stmt_info wi;
5561 gimple_seq copy;
5563 /* There's nothing to do for NULL_TREE. */
5564 if (seq == NULL)
5565 return seq;
5567 /* Set up ID. */
5568 memset (&id, 0, sizeof (id));
5569 id.src_fn = current_function_decl;
5570 id.dst_fn = current_function_decl;
5571 id.src_cfun = cfun;
5572 id.decl_map = new hash_map<tree, tree>;
5573 id.debug_map = NULL;
5575 id.copy_decl = copy_decl_no_change;
5576 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5577 id.transform_new_cfg = false;
5578 id.transform_return_to_modify = false;
5579 id.transform_parameter = false;
5580 id.transform_lang_insert_block = NULL;
5582 /* Walk the tree once to find local labels. */
5583 memset (&wi, 0, sizeof (wi));
5584 hash_set<tree> visited;
5585 wi.info = &id;
5586 wi.pset = &visited;
5587 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5589 copy = gimple_seq_copy (seq);
5591 /* Walk the copy, remapping decls. */
5592 memset (&wi, 0, sizeof (wi));
5593 wi.info = &id;
5594 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5596 /* Clean up. */
5597 delete id.decl_map;
5598 if (id.debug_map)
5599 delete id.debug_map;
5600 if (id.dependence_map)
5602 delete id.dependence_map;
5603 id.dependence_map = NULL;
5606 return copy;
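/* Illustrative sketch (not taken from the surrounding code): a pass that
   needs a private copy of a statement sequence, with every decl and label
   local to the current function renamed, could use this roughly as

     gimple_seq body = gimple_omp_body (stmt);
     gimple_seq priv = copy_gimple_seq_and_replace_locals (body);

   assuming STMT is an OMP statement owned by current_function_decl; the
   original sequence is left unmodified.  */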
5610 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5612 static tree
5613 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5615 if (*tp == data)
5616 return (tree) data;
5617 else
5618 return NULL;
5621 DEBUG_FUNCTION bool
5622 debug_find_tree (tree top, tree search)
5624 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
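/* For illustration: this helper is meant to be invoked by hand from the
   debugger, e.g.

     (gdb) call debug_find_tree (top, search)

   which returns true iff SEARCH appears somewhere inside TOP.  */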
5628 /* Declare the variables created by the inliner. Add all the variables in
5629 VARS to BLOCK. */
5631 static void
5632 declare_inline_vars (tree block, tree vars)
5634 tree t;
5635 for (t = vars; t; t = DECL_CHAIN (t))
5637 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5638 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5639 add_local_decl (cfun, t);
5642 if (block)
5643 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5646 /* Finish up the copy COPY of DECL: adjust its debug-related flags, RTL,
5647 mode and context so that it belongs to ID->dst_fn rather than
5648 ID->src_fn. Shared by all of the copy_decl_* routines below. */
5650 tree
5651 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5653 /* Don't generate debug information for the copy if we wouldn't have
5654 generated it for the original either. */
5655 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5656 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5658 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5659 declaration inspired this copy. */
5660 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5662 /* The new variable/label has no RTL, yet. */
5663 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5664 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5665 SET_DECL_RTL (copy, 0);
5666 /* For vector typed decls make sure to update DECL_MODE according
5667 to the new function context. */
5668 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5669 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5671 /* These args would always appear unused, if not for this. */
5672 TREE_USED (copy) = 1;
5674 /* Set the context for the new declaration. */
5675 if (!DECL_CONTEXT (decl))
5676 /* Globals stay global. */
5678 else if (DECL_CONTEXT (decl) != id->src_fn)
5679 /* Things that weren't in the scope of the function we're inlining
5680 from aren't in the scope we're inlining to, either. */
5682 else if (TREE_STATIC (decl))
5683 /* Function-scoped static variables should stay in the original
5684 function. */
5686 else
5688 /* Ordinary automatic local variables are now in the scope of the
5689 new function. */
5690 DECL_CONTEXT (copy) = id->dst_fn;
5691 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5693 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5694 DECL_ATTRIBUTES (copy)
5695 = tree_cons (get_identifier ("omp simt private"), NULL,
5696 DECL_ATTRIBUTES (copy));
5697 id->dst_simt_vars->safe_push (copy);
5701 return copy;
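/* For illustration: the copy_decl_* routines below all finish through
   copy_decl_for_dup_finish and are installed as the ID->copy_decl hook,
   roughly as

     id.copy_decl = copy_decl_no_change;
     ...
     tree new_decl = id->copy_decl (old_decl, id);

   which is how remap_decl and mark_local_labels_stmt above obtain their
   copies.  */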
5704 static tree
5705 copy_decl_to_var (tree decl, copy_body_data *id)
5707 tree copy, type;
5709 gcc_assert (TREE_CODE (decl) == PARM_DECL
5710 || TREE_CODE (decl) == RESULT_DECL);
5712 type = TREE_TYPE (decl);
5714 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5715 VAR_DECL, DECL_NAME (decl), type);
5716 if (DECL_PT_UID_SET_P (decl))
5717 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5718 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5719 TREE_READONLY (copy) = TREE_READONLY (decl);
5720 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5721 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5723 return copy_decl_for_dup_finish (id, decl, copy);
5726 /* Like copy_decl_to_var, but create a return slot object instead of a
5727 pointer variable for return by invisible reference. */
5729 static tree
5730 copy_result_decl_to_var (tree decl, copy_body_data *id)
5732 tree copy, type;
5734 gcc_assert (TREE_CODE (decl) == PARM_DECL
5735 || TREE_CODE (decl) == RESULT_DECL);
5737 type = TREE_TYPE (decl);
5738 if (DECL_BY_REFERENCE (decl))
5739 type = TREE_TYPE (type);
5741 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5742 VAR_DECL, DECL_NAME (decl), type);
5743 if (DECL_PT_UID_SET_P (decl))
5744 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5745 TREE_READONLY (copy) = TREE_READONLY (decl);
5746 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5747 if (!DECL_BY_REFERENCE (decl))
5749 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5750 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5753 return copy_decl_for_dup_finish (id, decl, copy);
5756 tree
5757 copy_decl_no_change (tree decl, copy_body_data *id)
5759 tree copy;
5761 copy = copy_node (decl);
5763 /* The COPY is not abstract; it will be generated in DST_FN. */
5764 DECL_ABSTRACT_P (copy) = false;
5765 lang_hooks.dup_lang_specific_decl (copy);
5767 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5768 been taken; it's for internal bookkeeping in expand_goto_internal. */
5769 if (TREE_CODE (copy) == LABEL_DECL)
5771 TREE_ADDRESSABLE (copy) = 0;
5772 LABEL_DECL_UID (copy) = -1;
5775 return copy_decl_for_dup_finish (id, decl, copy);
5778 static tree
5779 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5781 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5782 return copy_decl_to_var (decl, id);
5783 else
5784 return copy_decl_no_change (decl, id);
5787 /* Return a copy of the function's argument tree. */
5788 static tree
5789 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5790 bitmap args_to_skip, tree *vars)
5792 tree arg, *parg;
5793 tree new_parm = NULL;
5794 int i = 0;
5796 parg = &new_parm;
5798 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5799 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5801 tree new_tree = remap_decl (arg, id);
5802 if (TREE_CODE (new_tree) != PARM_DECL)
5803 new_tree = id->copy_decl (arg, id);
5804 lang_hooks.dup_lang_specific_decl (new_tree);
5805 *parg = new_tree;
5806 parg = &DECL_CHAIN (new_tree);
5808 else if (!id->decl_map->get (arg))
5810 /* Make an equivalent VAR_DECL. If the argument was used
5811 as a temporary variable later in the function, its uses will be
5812 replaced by this local variable. */
5813 tree var = copy_decl_to_var (arg, id);
5814 insert_decl_map (id, arg, var);
5815 /* Declare this new variable. */
5816 DECL_CHAIN (var) = *vars;
5817 *vars = var;
5819 return new_parm;
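/* For illustration: ARGS_TO_SKIP is indexed by parameter position, so a
   caller wishing to drop the second argument of the clone could set it up
   along the lines of

     bitmap args_to_skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (args_to_skip, 1);

   the skipped PARM_DECL is then mapped to a plain VAR_DECL above instead
   of being chained into the new argument list.  */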
5822 /* Return a copy of the function's static chain. */
5823 static tree
5824 copy_static_chain (tree static_chain, copy_body_data * id)
5826 tree *chain_copy, *pvar;
5828 chain_copy = &static_chain;
5829 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5831 tree new_tree = remap_decl (*pvar, id);
5832 lang_hooks.dup_lang_specific_decl (new_tree);
5833 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5834 *pvar = new_tree;
5836 return static_chain;
5839 /* Return true if the function is allowed to be versioned.
5840 This is a guard for the versioning functionality. */
5842 bool
5843 tree_versionable_function_p (tree fndecl)
5845 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5846 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
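/* For illustration: a declaration such as

     __attribute__ ((noclone)) int f (int);

   is rejected here, as is any function for which copy_forbidden reports a
   reason (for instance one containing labels reachable by non-local goto).  */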
5849 /* Update clone info after duplication. */
5851 static void
5852 update_clone_info (copy_body_data * id)
5854 struct cgraph_node *node;
5855 if (!id->dst_node->clones)
5856 return;
5857 for (node = id->dst_node->clones; node != id->dst_node;)
5859 /* First update replace maps to match the new body. */
5860 if (node->clone.tree_map)
5862 unsigned int i;
5863 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5865 struct ipa_replace_map *replace_info;
5866 replace_info = (*node->clone.tree_map)[i];
5867 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5868 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5871 if (node->clones)
5872 node = node->clones;
5873 else if (node->next_sibling_clone)
5874 node = node->next_sibling_clone;
5875 else
5877 while (node != id->dst_node && !node->next_sibling_clone)
5878 node = node->clone_of;
5879 if (node != id->dst_node)
5880 node = node->next_sibling_clone;
5885 /* Create a copy of a function's tree.
5886 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5887 of the original function and the new copied function
5888 respectively. In case we want to replace a DECL
5889 tree with another tree while duplicating the function's
5890 body, TREE_MAP represents the mapping between these
5891 trees. If UPDATE_CLONES is set, the call_stmt fields
5892 of edges of clones of the function will be updated.
5894 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5895 from the new version.
5896 If SKIP_RETURN is true, the new version will return void.
5897 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5898 If non-NULL, NEW_ENTRY determines the new entry BB of the clone. */
5900 void
5901 tree_function_versioning (tree old_decl, tree new_decl,
5902 vec<ipa_replace_map *, va_gc> *tree_map,
5903 bool update_clones, bitmap args_to_skip,
5904 bool skip_return, bitmap blocks_to_copy,
5905 basic_block new_entry)
5907 struct cgraph_node *old_version_node;
5908 struct cgraph_node *new_version_node;
5909 copy_body_data id;
5910 tree p;
5911 unsigned i;
5912 struct ipa_replace_map *replace_info;
5913 basic_block old_entry_block, bb;
5914 auto_vec<gimple *, 10> init_stmts;
5915 tree vars = NULL_TREE;
5916 bitmap debug_args_to_skip = args_to_skip;
5918 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5919 && TREE_CODE (new_decl) == FUNCTION_DECL);
5920 DECL_POSSIBLY_INLINED (old_decl) = 1;
5922 old_version_node = cgraph_node::get (old_decl);
5923 gcc_checking_assert (old_version_node);
5924 new_version_node = cgraph_node::get (new_decl);
5925 gcc_checking_assert (new_version_node);
5927 /* Copy over debug args. */
5928 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5930 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5931 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5932 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5933 old_debug_args = decl_debug_args_lookup (old_decl);
5934 if (old_debug_args)
5936 new_debug_args = decl_debug_args_insert (new_decl);
5937 *new_debug_args = vec_safe_copy (*old_debug_args);
5941 /* Output the inlining info for this abstract function, since it has been
5942 inlined. If we don't do this now, we can lose the information about the
5943 variables in the function when the blocks get blown away as soon as we
5944 remove the cgraph node. */
5945 (*debug_hooks->outlining_inline_function) (old_decl);
5947 DECL_ARTIFICIAL (new_decl) = 1;
5948 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5949 if (DECL_ORIGIN (old_decl) == old_decl)
5950 old_version_node->used_as_abstract_origin = true;
5951 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5953 /* Prepare the data structures for the tree copy. */
5954 memset (&id, 0, sizeof (id));
5956 /* Record statements that will need to be folded after copying. */
5957 id.statements_to_fold = new hash_set<gimple *>;
5959 id.decl_map = new hash_map<tree, tree>;
5960 id.debug_map = NULL;
5961 id.src_fn = old_decl;
5962 id.dst_fn = new_decl;
5963 id.src_node = old_version_node;
5964 id.dst_node = new_version_node;
5965 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5966 id.blocks_to_copy = blocks_to_copy;
5968 id.copy_decl = copy_decl_no_change;
5969 id.transform_call_graph_edges
5970 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5971 id.transform_new_cfg = true;
5972 id.transform_return_to_modify = false;
5973 id.transform_parameter = false;
5974 id.transform_lang_insert_block = NULL;
5976 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5977 (DECL_STRUCT_FUNCTION (old_decl));
5978 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5979 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5980 initialize_cfun (new_decl, old_decl,
5981 new_entry ? new_entry->count : old_entry_block->count);
5982 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5983 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5984 = id.src_cfun->gimple_df->ipa_pta;
5986 /* Copy the function's static chain. */
5987 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5988 if (p)
5989 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5990 = copy_static_chain (p, &id);
5992 /* If there's a tree_map, prepare for substitution. */
5993 if (tree_map)
5994 for (i = 0; i < tree_map->length (); i++)
5996 gimple *init;
5997 replace_info = (*tree_map)[i];
5998 if (replace_info->replace_p)
6000 int parm_num = -1;
6001 if (!replace_info->old_tree)
6003 int p = replace_info->parm_num;
6004 tree parm;
6005 tree req_type, new_type;
6007 for (parm = DECL_ARGUMENTS (old_decl); p;
6008 parm = DECL_CHAIN (parm))
6009 p--;
6010 replace_info->old_tree = parm;
6011 parm_num = replace_info->parm_num;
6012 req_type = TREE_TYPE (parm);
6013 new_type = TREE_TYPE (replace_info->new_tree);
6014 if (!useless_type_conversion_p (req_type, new_type))
6016 if (fold_convertible_p (req_type, replace_info->new_tree))
6017 replace_info->new_tree
6018 = fold_build1 (NOP_EXPR, req_type,
6019 replace_info->new_tree);
6020 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
6021 replace_info->new_tree
6022 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
6023 replace_info->new_tree);
6024 else
6026 if (dump_file)
6028 fprintf (dump_file, " const ");
6029 print_generic_expr (dump_file,
6030 replace_info->new_tree);
6031 fprintf (dump_file,
6032 " can't be converted to param ");
6033 print_generic_expr (dump_file, parm);
6034 fprintf (dump_file, "\n");
6036 replace_info->old_tree = NULL;
6040 else
6041 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
6042 if (replace_info->old_tree)
6044 init = setup_one_parameter (&id, replace_info->old_tree,
6045 replace_info->new_tree, id.src_fn,
6046 NULL,
6047 &vars);
6048 if (init)
6049 init_stmts.safe_push (init);
6050 if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
6052 if (parm_num == -1)
6054 tree parm;
6055 int p;
6056 for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
6057 parm = DECL_CHAIN (parm), p++)
6058 if (parm == replace_info->old_tree)
6060 parm_num = p;
6061 break;
6064 if (parm_num != -1)
6066 if (debug_args_to_skip == args_to_skip)
6068 debug_args_to_skip = BITMAP_ALLOC (NULL);
6069 bitmap_copy (debug_args_to_skip, args_to_skip);
6071 bitmap_clear_bit (debug_args_to_skip, parm_num);
6077 /* Copy the function's arguments. */
6078 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6079 DECL_ARGUMENTS (new_decl)
6080 = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
6081 args_to_skip, &vars);
6083 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6084 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6086 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6088 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6089 /* Add local vars. */
6090 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6092 if (DECL_RESULT (old_decl) == NULL_TREE)
6094 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6096 DECL_RESULT (new_decl)
6097 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6098 RESULT_DECL, NULL_TREE, void_type_node);
6099 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6100 cfun->returns_struct = 0;
6101 cfun->returns_pcc_struct = 0;
6103 else
6105 tree old_name;
6106 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6107 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6108 if (gimple_in_ssa_p (id.src_cfun)
6109 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6110 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6112 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6113 insert_decl_map (&id, old_name, new_name);
6114 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6115 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6119 /* Set up the destination function's loop tree. */
6120 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6122 cfun->curr_properties &= ~PROP_loops;
6123 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6124 cfun->curr_properties |= PROP_loops;
6127 /* Copy the function's body. */
6128 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6129 new_entry);
6131 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6132 number_blocks (new_decl);
6134 /* We want to create the BB unconditionally, so that the addition of
6135 debug stmts doesn't affect BB count, which may in the end cause
6136 codegen differences. */
6137 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6138 while (init_stmts.length ())
6139 insert_init_stmt (&id, bb, init_stmts.pop ());
6140 update_clone_info (&id);
6142 /* Remap the nonlocal_goto_save_area, if any. */
6143 if (cfun->nonlocal_goto_save_area)
6145 struct walk_stmt_info wi;
6147 memset (&wi, 0, sizeof (wi));
6148 wi.info = &id;
6149 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6152 /* Clean up. */
6153 delete id.decl_map;
6154 if (id.debug_map)
6155 delete id.debug_map;
6156 free_dominance_info (CDI_DOMINATORS);
6157 free_dominance_info (CDI_POST_DOMINATORS);
6159 update_max_bb_count ();
6160 fold_marked_statements (0, id.statements_to_fold);
6161 delete id.statements_to_fold;
6162 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6163 if (id.dst_node->definition)
6164 cgraph_edge::rebuild_references ();
6165 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6167 calculate_dominance_info (CDI_DOMINATORS);
6168 fix_loop_structure (NULL);
6170 update_ssa (TODO_update_ssa);
6172 /* After partial cloning we need to rescale frequencies so that they are
6173 within the proper range in the cloned function. */
6174 if (new_entry)
6176 struct cgraph_edge *e;
6177 rebuild_frequencies ();
6179 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6180 for (e = new_version_node->callees; e; e = e->next_callee)
6182 basic_block bb = gimple_bb (e->call_stmt);
6183 e->count = bb->count;
6185 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6187 basic_block bb = gimple_bb (e->call_stmt);
6188 e->count = bb->count;
6192 if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6194 tree parm;
6195 vec<tree, va_gc> **debug_args = NULL;
6196 unsigned int len = 0;
6197 for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6198 parm; parm = DECL_CHAIN (parm), i++)
6199 if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6201 tree ddecl;
6203 if (debug_args == NULL)
6205 debug_args = decl_debug_args_insert (new_decl);
6206 len = vec_safe_length (*debug_args);
6208 ddecl = make_node (DEBUG_EXPR_DECL);
6209 DECL_ARTIFICIAL (ddecl) = 1;
6210 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6211 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6212 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6213 vec_safe_push (*debug_args, ddecl);
6215 if (debug_args != NULL)
6217 /* On the callee side, add
6218 DEBUG D#Y s=> parm
6219 DEBUG var => D#Y
6220 stmts to the first bb where var is a VAR_DECL created for the
6221 optimized away parameter in DECL_INITIAL block. This hints
6222 in the debug info that var (whose DECL_ORIGIN is the parm
6223 PARM_DECL) is optimized away, but could be looked up at the
6224 call site as value of D#X there. */
6225 tree var = vars, vexpr;
6226 gimple_stmt_iterator cgsi
6227 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6228 gimple *def_temp;
6229 var = vars;
6230 i = vec_safe_length (*debug_args);
6233 i -= 2;
6234 while (var != NULL_TREE
6235 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6236 var = TREE_CHAIN (var);
6237 if (var == NULL_TREE)
6238 break;
6239 vexpr = make_node (DEBUG_EXPR_DECL);
6240 parm = (**debug_args)[i];
6241 DECL_ARTIFICIAL (vexpr) = 1;
6242 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6243 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6244 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6245 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6246 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6247 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6249 while (i > len);
6253 if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6254 BITMAP_FREE (debug_args_to_skip);
6255 free_dominance_info (CDI_DOMINATORS);
6256 free_dominance_info (CDI_POST_DOMINATORS);
6258 gcc_assert (!id.debug_stmts.exists ());
6259 pop_cfun ();
6260 return;
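/* Illustrative sketch (field names follow the uses above; the allocation
   details are the caller's business, e.g. cgraphclones.c for real clones):
   substituting the constant 42 for the first parameter of a new version
   could look roughly like

     ipa_replace_map *map = ggc_cleared_alloc<ipa_replace_map> ();
     map->old_tree = NULL_TREE;
     map->parm_num = 0;
     map->new_tree = build_int_cst (integer_type_node, 42);
     map->replace_p = true;

     vec<ipa_replace_map *, va_gc> *tree_map = NULL;
     vec_safe_push (tree_map, map);
     tree_function_versioning (old_decl, new_decl, tree_map,
                               false, NULL, false, NULL, NULL);

   with OLD_DECL and NEW_DECL being the original and the fresh FUNCTION_DECL.  */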
6263 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6264 the callee and return the inlined body on success. */
6266 tree
6267 maybe_inline_call_in_expr (tree exp)
6269 tree fn = get_callee_fndecl (exp);
6271 /* We can only try to inline "const" functions. */
6272 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6274 call_expr_arg_iterator iter;
6275 copy_body_data id;
6276 tree param, arg, t;
6277 hash_map<tree, tree> decl_map;
6279 /* Remap the parameters. */
6280 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6281 param;
6282 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6283 decl_map.put (param, arg);
6285 memset (&id, 0, sizeof (id));
6286 id.src_fn = fn;
6287 id.dst_fn = current_function_decl;
6288 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6289 id.decl_map = &decl_map;
6291 id.copy_decl = copy_decl_no_change;
6292 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6293 id.transform_new_cfg = false;
6294 id.transform_return_to_modify = true;
6295 id.transform_parameter = true;
6296 id.transform_lang_insert_block = NULL;
6298 /* Make sure not to unshare trees behind the front-end's back
6299 since front-end specific mechanisms may rely on sharing. */
6300 id.regimplify = false;
6301 id.do_not_unshare = true;
6303 /* We're not inside any EH region. */
6304 id.eh_lp_nr = 0;
6306 t = copy_tree_body (&id);
6308 /* We can only return something suitable for use in a GENERIC
6309 expression tree. */
6310 if (TREE_CODE (t) == MODIFY_EXPR)
6311 return TREE_OPERAND (t, 1);
6314 return NULL_TREE;
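/* For illustration: this entry point works purely on GENERIC and can be
   tried by front ends or folders on a call they would like to evaluate,
   e.g.

     tree folded = maybe_inline_call_in_expr (call);
     if (folded)
       return folded;

   It only succeeds for "const" callees whose copied body reduces to a
   single value-producing MODIFY_EXPR.  */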
6317 /* Duplicate a type, fields and all. */
6319 tree
6320 build_duplicate_type (tree type)
6322 struct copy_body_data id;
6324 memset (&id, 0, sizeof (id));
6325 id.src_fn = current_function_decl;
6326 id.dst_fn = current_function_decl;
6327 id.src_cfun = cfun;
6328 id.decl_map = new hash_map<tree, tree>;
6329 id.debug_map = NULL;
6330 id.copy_decl = copy_decl_no_change;
6332 type = remap_type_1 (type, &id);
6334 delete id.decl_map;
6335 if (id.debug_map)
6336 delete id.debug_map;
6338 TYPE_CANONICAL (type) = type;
6340 return type;
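/* For illustration: because the duplicate gets itself as TYPE_CANONICAL,

     tree t2 = build_duplicate_type (t1);

   yields a type that canonical type comparison never treats as the same
   type as T1, even though its fields are copies of T1's.  */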
6343 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6344 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6345 evaluation. */
6347 tree
6348 copy_fn (tree fn, tree& parms, tree& result)
6350 copy_body_data id;
6351 tree param;
6352 hash_map<tree, tree> decl_map;
6354 tree *p = &parms;
6355 *p = NULL_TREE;
6357 memset (&id, 0, sizeof (id));
6358 id.src_fn = fn;
6359 id.dst_fn = current_function_decl;
6360 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6361 id.decl_map = &decl_map;
6363 id.copy_decl = copy_decl_no_change;
6364 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6365 id.transform_new_cfg = false;
6366 id.transform_return_to_modify = false;
6367 id.transform_parameter = true;
6368 id.transform_lang_insert_block = NULL;
6370 /* Make sure not to unshare trees behind the front-end's back
6371 since front-end specific mechanisms may rely on sharing. */
6372 id.regimplify = false;
6373 id.do_not_unshare = true;
6374 id.do_not_fold = true;
6376 /* We're not inside any EH region. */
6377 id.eh_lp_nr = 0;
6379 /* Remap the parameters and result and return them to the caller. */
6380 for (param = DECL_ARGUMENTS (fn);
6381 param;
6382 param = DECL_CHAIN (param))
6384 *p = remap_decl (param, &id);
6385 p = &DECL_CHAIN (*p);
6388 if (DECL_RESULT (fn))
6389 result = remap_decl (DECL_RESULT (fn), &id);
6390 else
6391 result = NULL_TREE;
6393 return copy_tree_body (&id);
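/* For illustration: the C++ constexpr evaluator is the expected user; a
   call looks roughly like

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);

   after which BODY can be evaluated with PARMS bound to the actual
   arguments and RESULT standing for the returned value.  */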