gcc/ipa-split.c
1 /* Function splitting pass
2 Copyright (C) 2010-2021 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka <jh@suse.cz>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* The purpose of this pass is to split function bodies to improve
22 inlining. I.e. for function of the form:
24 func (...)
26 if (cheap_test)
27 something_small
28 else
29 something_big
32 Produce:
34 func.part (...)
36 something_big
39 func (...)
41 if (cheap_test)
42 something_small
43 else
44 func.part (...);
47 When func becomes inlinable and when cheap_test is often true, inlining func,
48 but not func.part, leads to a performance improvement similar to inlining the
49 original func while the code size growth is smaller.
51 The pass is organized in three stages:
52 1) Collect local info about basic block into BB_INFO structure and
53 compute function body estimated size and time.
54 2) Via DFS walk find all possible basic blocks where we can split
55 and choose the best one.
56 3) If split point is found, split at the specified BB by creating a clone
57 and updating function to call it.
59 The decisions about which functions to split are made in execute_split_functions
60 and consider_split.
62 There are several possible future improvements for this pass including:
64 1) Splitting to break up large functions
65 2) Splitting to reduce stack frame usage
66 3) Allow the split part of the function to use values computed in the header part.
67 The values need to be passed to the split function, perhaps via the same
68 interface as for nested functions or as an argument.
69 4) Support for simple rematerialization. I.e. when the split part uses a
70 value computed in the header from a function parameter in a very cheap way,
71 we can just recompute it.
72 5) Support splitting of nested functions.
73 6) Support non-SSA arguments.
74 7) There is nothing preventing us from producing multiple parts of a single
75 function when needed, or from also splitting the parts. */
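/* Editorial illustration, not part of the original file: a hedged sketch of
   the transformation on a concrete function.  The names lookup, lookup_slow
   and the cached-key test are made up for this example.

     int
     lookup (struct table *t, int key)
     {
       if (t->cached_key == key)      <- cheap_test
         return t->cached_value;      <- something_small
       return lookup_slow (t, key);   <- something_big
     }

   conceptually becomes

     int
     lookup.part (struct table *t, int key)
     {
       return lookup_slow (t, key);
     }

     int
     lookup (struct table *t, int key)
     {
       if (t->cached_key == key)
         return t->cached_value;
       return lookup.part (t, key);
     }

   so the small header can be inlined at every call site while the large
   tail stays out of line.  */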
77 #include "config.h"
78 #include "system.h"
79 #include "coretypes.h"
80 #include "backend.h"
81 #include "rtl.h"
82 #include "tree.h"
83 #include "gimple.h"
84 #include "cfghooks.h"
85 #include "alloc-pool.h"
86 #include "tree-pass.h"
87 #include "ssa.h"
88 #include "cgraph.h"
89 #include "diagnostic.h"
90 #include "fold-const.h"
91 #include "cfganal.h"
92 #include "calls.h"
93 #include "gimplify.h"
94 #include "gimple-iterator.h"
95 #include "gimplify-me.h"
96 #include "gimple-walk.h"
97 #include "symbol-summary.h"
98 #include "ipa-prop.h"
99 #include "tree-cfg.h"
100 #include "tree-into-ssa.h"
101 #include "tree-dfa.h"
102 #include "tree-inline.h"
103 #include "gimple-pretty-print.h"
104 #include "ipa-fnsummary.h"
105 #include "cfgloop.h"
106 #include "attribs.h"
108 /* Per basic block info. */
110 class split_bb_info
112 public:
113 unsigned int size;
114 sreal time;
117 static vec<split_bb_info> bb_info_vec;
119 /* Description of split point. */
121 class split_point
123 public:
124 /* Size of the partitions. */
125 sreal header_time, split_time;
126 unsigned int header_size, split_size;
128 /* SSA names that need to be passed into the split function. */
129 bitmap ssa_names_to_pass;
131 /* Basic block where we split (that will become the entry point of the new function). */
132 basic_block entry_bb;
134 /* Count for entering the split part.
135 This is not count of the entry_bb because it may be in loop. */
136 profile_count count;
138 /* Basic blocks we are splitting away. */
139 bitmap split_bbs;
141 /* True when return value is computed on split part and thus it needs
142 to be returned. */
143 bool split_part_set_retval;
146 /* Best split point found. */
148 class split_point best_split_point;
150 /* Set of basic blocks that are not allowed to dominate a split point. */
152 static bitmap forbidden_dominators;
154 static tree find_retval (basic_block return_bb);
156 /* Callback for walk_stmt_load_store_addr_ops. If T is a non-SSA automatic
157 variable, check whether it is present in the bitmap passed via DATA. */
159 static bool
160 test_nonssa_use (gimple *, tree t, tree, void *data)
162 t = get_base_address (t);
164 if (!t || is_gimple_reg (t))
165 return false;
167 if (TREE_CODE (t) == PARM_DECL
168 || (VAR_P (t)
169 && auto_var_in_fn_p (t, current_function_decl))
170 || TREE_CODE (t) == RESULT_DECL
171 /* Normal labels are part of CFG and will be handled gracefully.
172 Forced labels however can be used directly by statements and
173 need to stay in one partition along with their uses. */
174 || (TREE_CODE (t) == LABEL_DECL
175 && FORCED_LABEL (t)))
176 return bitmap_bit_p ((bitmap)data, DECL_UID (t));
178 /* For DECL_BY_REFERENCE, the return value is actually a pointer. We want
179 to pretend that the value pointed to is actual result decl. */
180 if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
181 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
182 && SSA_NAME_VAR (TREE_OPERAND (t, 0))
183 && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
184 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
185 return
186 bitmap_bit_p ((bitmap)data,
187 DECL_UID (DECL_RESULT (current_function_decl)));
189 return false;
192 /* Dump split point CURRENT. */
194 static void
195 dump_split_point (FILE * file, class split_point *current)
197 fprintf (file,
198 "Split point at BB %i\n"
199 " header time: %f header size: %i\n"
200 " split time: %f split size: %i\n bbs: ",
201 current->entry_bb->index, current->header_time.to_double (),
202 current->header_size, current->split_time.to_double (),
203 current->split_size);
204 dump_bitmap (file, current->split_bbs);
205 fprintf (file, " SSA names to pass: ");
206 dump_bitmap (file, current->ssa_names_to_pass);
209 /* Look for all BBs in header that might lead to the split part and verify
210 that they are not defining any non-SSA var used by the split part.
211 Parameters are the same as for consider_split. */
213 static bool
214 verify_non_ssa_vars (class split_point *current, bitmap non_ssa_vars,
215 basic_block return_bb)
217 bitmap seen = BITMAP_ALLOC (NULL);
218 vec<basic_block> worklist = vNULL;
219 edge e;
220 edge_iterator ei;
221 bool ok = true;
222 basic_block bb;
224 FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
225 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
226 && !bitmap_bit_p (current->split_bbs, e->src->index))
228 worklist.safe_push (e->src);
229 bitmap_set_bit (seen, e->src->index);
232 while (!worklist.is_empty ())
234 bb = worklist.pop ();
235 FOR_EACH_EDGE (e, ei, bb->preds)
236 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
237 && bitmap_set_bit (seen, e->src->index))
239 gcc_checking_assert (!bitmap_bit_p (current->split_bbs,
240 e->src->index));
241 worklist.safe_push (e->src);
243 for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
244 gsi_next (&bsi))
246 gimple *stmt = gsi_stmt (bsi);
247 if (is_gimple_debug (stmt))
248 continue;
249 if (walk_stmt_load_store_addr_ops
250 (stmt, non_ssa_vars, test_nonssa_use, test_nonssa_use,
251 test_nonssa_use))
253 ok = false;
254 goto done;
256 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
257 if (test_nonssa_use (stmt, gimple_label_label (label_stmt),
258 NULL_TREE, non_ssa_vars))
260 ok = false;
261 goto done;
264 for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
265 gsi_next (&bsi))
267 if (walk_stmt_load_store_addr_ops
268 (gsi_stmt (bsi), non_ssa_vars, test_nonssa_use, test_nonssa_use,
269 test_nonssa_use))
271 ok = false;
272 goto done;
275 FOR_EACH_EDGE (e, ei, bb->succs)
277 if (e->dest != return_bb)
278 continue;
279 for (gphi_iterator bsi = gsi_start_phis (return_bb);
280 !gsi_end_p (bsi);
281 gsi_next (&bsi))
283 gphi *stmt = bsi.phi ();
284 tree op = gimple_phi_arg_def (stmt, e->dest_idx);
286 if (virtual_operand_p (gimple_phi_result (stmt)))
287 continue;
288 if (TREE_CODE (op) != SSA_NAME
289 && test_nonssa_use (stmt, op, op, non_ssa_vars))
291 ok = false;
292 goto done;
298 /* Verify that the rest of function does not define any label
299 used by the split part. */
300 FOR_EACH_BB_FN (bb, cfun)
301 if (!bitmap_bit_p (current->split_bbs, bb->index)
302 && !bitmap_bit_p (seen, bb->index))
304 gimple_stmt_iterator bsi;
305 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
306 if (glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (bsi)))
308 if (test_nonssa_use (label_stmt,
309 gimple_label_label (label_stmt),
310 NULL_TREE, non_ssa_vars))
312 ok = false;
313 goto done;
316 else
317 break;
320 done:
321 BITMAP_FREE (seen);
322 worklist.release ();
323 return ok;
326 /* If STMT is a call, check the callee against a list of forbidden
327 predicate functions. If a match is found, look for uses of the
328 call result in condition statements that compare against zero.
329 For each such use, find the block targeted by the condition
330 statement for the nonzero result, and set the bit for this block
331 in the forbidden dominators bitmap. The purpose of this is to avoid
332 selecting a split point where we are likely to lose the chance
333 to optimize away an unused function call. */
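/* Editorial illustration, not part of the original file: a hedged sketch of
   the pattern this guards against; the copy helpers are made-up names.

     if (__builtin_constant_p (n) && n < 8)
       copy_small (dst, src, n);     <- meant to fold away after inlining
     else
       copy_generic (dst, src, n);

   After inlining into a caller where n is a compile-time constant, the
   whole test is expected to fold.  Blocks reached on the nonzero
   __builtin_constant_p result are therefore recorded as forbidden
   dominators so that no split point is placed below them (see PR49642
   referenced below).  */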
335 static void
336 check_forbidden_calls (gimple *stmt)
338 imm_use_iterator use_iter;
339 use_operand_p use_p;
340 tree lhs;
342 /* At the moment, __builtin_constant_p is the only forbidden
343 predicate function call (see PR49642). */
344 if (!gimple_call_builtin_p (stmt, BUILT_IN_CONSTANT_P))
345 return;
347 lhs = gimple_call_lhs (stmt);
349 if (!lhs || TREE_CODE (lhs) != SSA_NAME)
350 return;
352 FOR_EACH_IMM_USE_FAST (use_p, use_iter, lhs)
354 tree op1;
355 basic_block use_bb, forbidden_bb;
356 enum tree_code code;
357 edge true_edge, false_edge;
358 gcond *use_stmt;
360 use_stmt = dyn_cast <gcond *> (USE_STMT (use_p));
361 if (!use_stmt)
362 continue;
364 /* Assuming canonical form for GIMPLE_COND here, with constant
365 in second position. */
366 op1 = gimple_cond_rhs (use_stmt);
367 code = gimple_cond_code (use_stmt);
368 use_bb = gimple_bb (use_stmt);
370 extract_true_false_edges_from_block (use_bb, &true_edge, &false_edge);
372 /* We're only interested in comparisons that distinguish
373 unambiguously from zero. */
374 if (!integer_zerop (op1) || code == LE_EXPR || code == GE_EXPR)
375 continue;
377 if (code == EQ_EXPR)
378 forbidden_bb = false_edge->dest;
379 else
380 forbidden_bb = true_edge->dest;
382 bitmap_set_bit (forbidden_dominators, forbidden_bb->index);
386 /* If BB is dominated by any block in the forbidden dominators set,
387 return TRUE; else FALSE. */
389 static bool
390 dominated_by_forbidden (basic_block bb)
392 unsigned dom_bb;
393 bitmap_iterator bi;
395 EXECUTE_IF_SET_IN_BITMAP (forbidden_dominators, 1, dom_bb, bi)
397 if (dominated_by_p (CDI_DOMINATORS, bb,
398 BASIC_BLOCK_FOR_FN (cfun, dom_bb)))
399 return true;
402 return false;
405 /* For a given split point CURRENT and return block RETURN_BB, return 1
406 if ssa name VAL is set by the split part and 0 otherwise. */
407 static bool
408 split_part_set_ssa_name_p (tree val, class split_point *current,
409 basic_block return_bb)
411 if (TREE_CODE (val) != SSA_NAME)
412 return false;
414 return (!SSA_NAME_IS_DEFAULT_DEF (val)
415 && (bitmap_bit_p (current->split_bbs,
416 gimple_bb (SSA_NAME_DEF_STMT (val))->index)
417 || gimple_bb (SSA_NAME_DEF_STMT (val)) == return_bb));
420 /* We found a split_point CURRENT. NON_SSA_VARS is a bitmap of all non-SSA
421 variables used and RETURN_BB is the return basic block.
422 See if we can split the function here. */
424 static void
425 consider_split (class split_point *current, bitmap non_ssa_vars,
426 basic_block return_bb)
428 tree parm;
429 unsigned int num_args = 0;
430 unsigned int call_overhead;
431 edge e;
432 edge_iterator ei;
433 gphi_iterator bsi;
434 unsigned int i;
435 tree retval;
436 bool back_edge = false;
438 if (dump_file && (dump_flags & TDF_DETAILS))
439 dump_split_point (dump_file, current);
441 current->count = profile_count::zero ();
442 FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
444 if (e->flags & EDGE_DFS_BACK)
445 back_edge = true;
446 if (!bitmap_bit_p (current->split_bbs, e->src->index))
447 current->count += e->count ();
450 /* Do not split when we would end up calling the function anyway.
451 Comparisons are three-state; use !(...<...) to also give up when the outcome
452 is unknown. */
453 if (!(current->count
454 < (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale
455 (param_partial_inlining_entry_probability, 100))))
457 /* When the profile is guessed, we cannot expect it to give us a
458 realistic estimate of how likely the function is to take the
459 complex path. As a special case, when the tail of the function is
460 a loop, enable splitting since inlining code that skips the loop
461 is likely a noticeable win. */
462 if (back_edge
463 && profile_status_for_fn (cfun) != PROFILE_READ
464 && current->count
465 < ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
467 if (dump_file && (dump_flags & TDF_DETAILS))
469 fprintf (dump_file,
470 " Split before loop, accepting despite low counts");
471 current->count.dump (dump_file);
472 fprintf (dump_file, " ");
473 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.dump (dump_file);
476 else
478 if (dump_file && (dump_flags & TDF_DETAILS))
479 fprintf (dump_file,
480 " Refused: incoming frequency is too large.\n");
481 return;
485 if (!current->header_size)
487 if (dump_file && (dump_flags & TDF_DETAILS))
488 fprintf (dump_file, " Refused: header empty\n");
489 return;
492 /* Verify that PHI args on entry are either virtual or all their operands
493 incoming from header are the same. */
494 for (bsi = gsi_start_phis (current->entry_bb); !gsi_end_p (bsi); gsi_next (&bsi))
496 gphi *stmt = bsi.phi ();
497 tree val = NULL;
499 if (virtual_operand_p (gimple_phi_result (stmt)))
500 continue;
501 for (i = 0; i < gimple_phi_num_args (stmt); i++)
503 edge e = gimple_phi_arg_edge (stmt, i);
504 if (!bitmap_bit_p (current->split_bbs, e->src->index))
506 tree edge_val = gimple_phi_arg_def (stmt, i);
507 if (val && edge_val != val)
509 if (dump_file && (dump_flags & TDF_DETAILS))
510 fprintf (dump_file,
511 " Refused: entry BB has PHI with multiple variants\n");
512 return;
514 val = edge_val;
520 /* See what argument we will pass to the split function and compute
521 call overhead. */
522 call_overhead = eni_size_weights.call_cost;
523 for (parm = DECL_ARGUMENTS (current_function_decl); parm;
524 parm = DECL_CHAIN (parm))
526 if (!is_gimple_reg (parm))
528 if (bitmap_bit_p (non_ssa_vars, DECL_UID (parm)))
530 if (dump_file && (dump_flags & TDF_DETAILS))
531 fprintf (dump_file,
532 " Refused: need to pass non-ssa param values\n");
533 return;
536 else
538 tree ddef = ssa_default_def (cfun, parm);
539 if (ddef
540 && bitmap_bit_p (current->ssa_names_to_pass,
541 SSA_NAME_VERSION (ddef)))
543 if (!VOID_TYPE_P (TREE_TYPE (parm)))
544 call_overhead += estimate_move_cost (TREE_TYPE (parm), false);
545 num_args++;
549 if (!VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
550 call_overhead += estimate_move_cost (TREE_TYPE (TREE_TYPE
551 (current_function_decl)),
552 false);
554 if (current->split_size <= call_overhead)
556 if (dump_file && (dump_flags & TDF_DETAILS))
557 fprintf (dump_file,
558 " Refused: split size is smaller than call overhead\n");
559 return;
561 /* FIXME: The logic here is not very precise, because inliner does use
562 inline predicates to reduce function body size. We add 10 to anticipate
563 that. Next stage1 we should try to be more meaningful here. */
564 if (current->header_size + call_overhead
565 >= (unsigned int)(DECL_DECLARED_INLINE_P (current_function_decl)
566 ? param_max_inline_insns_single
567 : param_max_inline_insns_auto) + 10)
569 if (dump_file && (dump_flags & TDF_DETAILS))
570 fprintf (dump_file,
571 " Refused: header size is too large for inline candidate\n");
572 return;
575 /* Splitting functions brings the target out of its comdat group; this will
576 lead to code duplication if the function is reused by another unit.
577 Limit this duplication. This is consistent with the limit in tree-sra.c.
578 FIXME: with LTO we ought to be able to do better! */
579 if (DECL_ONE_ONLY (current_function_decl)
580 && current->split_size >= (unsigned int) param_max_inline_insns_auto + 10)
582 if (dump_file && (dump_flags & TDF_DETAILS))
583 fprintf (dump_file,
584 " Refused: function is COMDAT and tail is too large\n");
585 return;
587 /* For comdat functions also reject very small tails; those will likely get
588 inlined back and we do not want to risk the duplication overhead.
589 FIXME: with LTO we ought to be able to do better! */
590 if (DECL_ONE_ONLY (current_function_decl)
591 && current->split_size
592 <= (unsigned int) param_early_inlining_insns / 2)
594 if (dump_file && (dump_flags & TDF_DETAILS))
595 fprintf (dump_file,
596 " Refused: function is COMDAT and tail is too small\n");
597 return;
600 /* FIXME: we currently can pass only SSA function parameters to the split
601 arguments. Once parm_adjustment infrastructure is supported by cloning,
602 we can pass more than that. */
603 if (num_args != bitmap_count_bits (current->ssa_names_to_pass))
606 if (dump_file && (dump_flags & TDF_DETAILS))
607 fprintf (dump_file,
608 " Refused: need to pass non-param values\n");
609 return;
612 /* When there are non-ssa vars used in the split region, see if they
613 are used in the header region. If so, reject the split.
614 FIXME: we can use nested function support to access both. */
615 if (!bitmap_empty_p (non_ssa_vars)
616 && !verify_non_ssa_vars (current, non_ssa_vars, return_bb))
618 if (dump_file && (dump_flags & TDF_DETAILS))
619 fprintf (dump_file,
620 " Refused: split part has non-ssa uses\n");
621 return;
624 /* If the split point is dominated by a forbidden block, reject
625 the split. */
626 if (!bitmap_empty_p (forbidden_dominators)
627 && dominated_by_forbidden (current->entry_bb))
629 if (dump_file && (dump_flags & TDF_DETAILS))
630 fprintf (dump_file,
631 " Refused: split point dominated by forbidden block\n");
632 return;
635 /* See if retval used by return bb is computed by header or split part.
636 When it is computed by split part, we need to produce return statement
637 in the split part and add code to header to pass it around.
639 This is a bit tricky to test:
640 1) When there is no return_bb or no return value, we always pass
641 value around.
642 2) Invariants are always computed by caller.
643 3) For SSA we need to look if defining statement is in header or split part
644 4) For non-SSA we need to look where the var is computed. */
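/* Editorial illustration, not part of the original file: a hedged example
   of case 3 above; the names are made up.  In

     int
     f (int x)
     {
       int r = 0;
       if (x > 0)
         r = expensive (x);
       return r;
     }

   the returned SSA name is defined by a PHI in the return block that merges
   the header's 0 with the split part's expensive (x).  A definition in
   return_bb counts as set by the split part (see split_part_set_ssa_name_p),
   so the split part has to compute and return the value.  */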
645 retval = find_retval (return_bb);
646 if (!retval)
648 /* If there is a return_bb with no return value in a function returning a
649 value by reference, also make the split part return void, otherwise
650 the expansion would try to create a non-POD temporary, which is
651 invalid. */
652 if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
653 && DECL_RESULT (current_function_decl)
654 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
655 current->split_part_set_retval = false;
656 else
657 current->split_part_set_retval = true;
659 else if (is_gimple_min_invariant (retval))
660 current->split_part_set_retval = false;
661 /* A special case is a value returned by reference, which we record as if it
662 were a non-SSA variable set to the result_decl. */
663 else if (TREE_CODE (retval) == SSA_NAME
664 && SSA_NAME_VAR (retval)
665 && TREE_CODE (SSA_NAME_VAR (retval)) == RESULT_DECL
666 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
667 current->split_part_set_retval
668 = bitmap_bit_p (non_ssa_vars, DECL_UID (SSA_NAME_VAR (retval)));
669 else if (TREE_CODE (retval) == SSA_NAME)
670 current->split_part_set_retval
671 = split_part_set_ssa_name_p (retval, current, return_bb);
672 else if (TREE_CODE (retval) == PARM_DECL)
673 current->split_part_set_retval = false;
674 else if (VAR_P (retval)
675 || TREE_CODE (retval) == RESULT_DECL)
676 current->split_part_set_retval
677 = bitmap_bit_p (non_ssa_vars, DECL_UID (retval));
678 else
679 current->split_part_set_retval = true;
681 /* split_function fixes up at most one non-virtual PHI node in return_bb,
682 for the return value. If there are other PHIs, give up. */
683 if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
685 gphi_iterator psi;
687 for (psi = gsi_start_phis (return_bb); !gsi_end_p (psi); gsi_next (&psi))
688 if (!virtual_operand_p (gimple_phi_result (psi.phi ()))
689 && !(retval
690 && current->split_part_set_retval
691 && TREE_CODE (retval) == SSA_NAME
692 && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))
693 && SSA_NAME_DEF_STMT (retval) == psi.phi ()))
695 if (dump_file && (dump_flags & TDF_DETAILS))
696 fprintf (dump_file,
697 " Refused: return bb has extra PHIs\n");
698 return;
702 if (dump_file && (dump_flags & TDF_DETAILS))
703 fprintf (dump_file, " Accepted!\n");
705 /* At the moment choose the split point with the lowest count and, among
706 those, the one leaving the smallest header.
707 In the future we might reconsider this heuristic. */
708 if (!best_split_point.split_bbs
709 || best_split_point.count
710 > current->count
711 || (best_split_point.count == current->count
712 && best_split_point.split_size < current->split_size))
715 if (dump_file && (dump_flags & TDF_DETAILS))
716 fprintf (dump_file, " New best split point!\n");
717 if (best_split_point.ssa_names_to_pass)
719 BITMAP_FREE (best_split_point.ssa_names_to_pass);
720 BITMAP_FREE (best_split_point.split_bbs);
722 best_split_point = *current;
723 best_split_point.ssa_names_to_pass = BITMAP_ALLOC (NULL);
724 bitmap_copy (best_split_point.ssa_names_to_pass,
725 current->ssa_names_to_pass);
726 best_split_point.split_bbs = BITMAP_ALLOC (NULL);
727 bitmap_copy (best_split_point.split_bbs, current->split_bbs);
731 /* Return basic block containing RETURN statement. We allow basic blocks
732 of the form:
733 <retval> = tmp_var;
734 return <retval>
735 but return_bb cannot be more complex than this (except for
736 -fsanitize=thread we allow TSAN_FUNC_EXIT () internal call in there).
737 If nothing is found, return the exit block.
739 When there are multiple RETURN statements, choose the one with a return value,
740 since that one is more likely shared by multiple code paths.
742 Return BB is special, because for function splitting it is the only
743 basic block that is duplicated in between header and split part of the
744 function.
746 TODO: We might support multiple return blocks. */
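/* Editorial illustration, not part of the original file: a hedged sketch of
   a shared return block; g is a made-up callee.  For

     int
     f (int x)
     {
       if (x)
         return g (x);
       return 0;
     }

   the two returns are often funneled into a single block of the allowed
   shape, roughly

     <bb ret>:
     # _1 = PHI <_2 (then), 0 (else)>
     return _1;

   and that block is the one duplicated between the header and the split
   part.  */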
748 static basic_block
749 find_return_bb (void)
751 edge e;
752 basic_block return_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
753 gimple_stmt_iterator bsi;
754 bool found_return = false;
755 tree retval = NULL_TREE;
757 if (!single_pred_p (EXIT_BLOCK_PTR_FOR_FN (cfun)))
758 return return_bb;
760 e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun));
761 for (bsi = gsi_last_bb (e->src); !gsi_end_p (bsi); gsi_prev (&bsi))
763 gimple *stmt = gsi_stmt (bsi);
764 if (gimple_code (stmt) == GIMPLE_LABEL
765 || is_gimple_debug (stmt)
766 || gimple_clobber_p (stmt))
768 else if (gimple_code (stmt) == GIMPLE_ASSIGN
769 && found_return
770 && gimple_assign_single_p (stmt)
771 && (auto_var_in_fn_p (gimple_assign_rhs1 (stmt),
772 current_function_decl)
773 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
774 && retval == gimple_assign_lhs (stmt))
776 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
778 found_return = true;
779 retval = gimple_return_retval (return_stmt);
781 /* For -fsanitize=thread, allow also TSAN_FUNC_EXIT () in the return
782 bb. */
783 else if ((flag_sanitize & SANITIZE_THREAD)
784 && gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
786 else
787 break;
789 if (gsi_end_p (bsi) && found_return)
790 return_bb = e->src;
792 return return_bb;
795 /* Given return basic block RETURN_BB, see where return value is really
796 stored. */
797 static tree
798 find_retval (basic_block return_bb)
800 gimple_stmt_iterator bsi;
801 for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
802 if (greturn *return_stmt = dyn_cast <greturn *> (gsi_stmt (bsi)))
803 return gimple_return_retval (return_stmt);
804 else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
805 && !gimple_clobber_p (gsi_stmt (bsi)))
806 return gimple_assign_rhs1 (gsi_stmt (bsi));
807 return NULL;
810 /* Callback for walk_stmt_load_store_addr_ops. If T is a non-SSA automatic
811 variable, mark it as used in the bitmap passed via DATA.
812 Return true when access to T prevents splitting the function. */
814 static bool
815 mark_nonssa_use (gimple *, tree t, tree, void *data)
817 t = get_base_address (t);
819 if (!t || is_gimple_reg (t))
820 return false;
822 /* At present we can't pass non-SSA arguments to split function.
823 FIXME: this can be relaxed by passing references to arguments. */
824 if (TREE_CODE (t) == PARM_DECL)
826 if (dump_file && (dump_flags & TDF_DETAILS))
827 fprintf (dump_file,
828 "Cannot split: use of non-ssa function parameter.\n");
829 return true;
832 if ((VAR_P (t) && auto_var_in_fn_p (t, current_function_decl))
833 || TREE_CODE (t) == RESULT_DECL
834 || (TREE_CODE (t) == LABEL_DECL && FORCED_LABEL (t)))
835 bitmap_set_bit ((bitmap)data, DECL_UID (t));
837 /* For DECL_BY_REFERENCE, the return value is actually a pointer. We want
838 to pretend that the value pointed to is actual result decl. */
839 if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
840 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
841 && SSA_NAME_VAR (TREE_OPERAND (t, 0))
842 && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
843 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
844 return
845 bitmap_bit_p ((bitmap)data,
846 DECL_UID (DECL_RESULT (current_function_decl)));
848 return false;
851 /* Compute local properties of basic block BB that we collect when looking
852 for split points. We record ssa defs in SET_SSA_NAMES, ssa uses in
853 USED_SSA_NAMES, and any non-SSA automatic variables that are used in
854 NON_SSA_VARS.
856 When BB has edge to RETURN_BB, collect uses in RETURN_BB too.
858 Return false when BB contains something that prevents it from being put into
859 split function. */
861 static bool
862 visit_bb (basic_block bb, basic_block return_bb,
863 bitmap set_ssa_names, bitmap used_ssa_names,
864 bitmap non_ssa_vars)
866 edge e;
867 edge_iterator ei;
868 bool can_split = true;
870 for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
871 gsi_next (&bsi))
873 gimple *stmt = gsi_stmt (bsi);
874 tree op;
875 ssa_op_iter iter;
876 tree decl;
878 if (is_gimple_debug (stmt))
879 continue;
881 if (gimple_clobber_p (stmt))
882 continue;
884 /* FIXME: We can split regions containing EH. We cannot however
885 split RESX, EH_DISPATCH and EH_POINTER referring to same region
886 into different partitions. This would require tracking of
887 EH regions and checking in consider_split_point if they
888 are not used elsewhere. */
889 if (gimple_code (stmt) == GIMPLE_RESX)
891 if (dump_file && (dump_flags & TDF_DETAILS))
892 fprintf (dump_file, "Cannot split: resx.\n");
893 can_split = false;
895 if (gimple_code (stmt) == GIMPLE_EH_DISPATCH)
897 if (dump_file && (dump_flags & TDF_DETAILS))
898 fprintf (dump_file, "Cannot split: eh dispatch.\n");
899 can_split = false;
902 /* Check builtins that prevent splitting. */
903 if (gimple_code (stmt) == GIMPLE_CALL
904 && (decl = gimple_call_fndecl (stmt)) != NULL_TREE
905 && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
906 switch (DECL_FUNCTION_CODE (decl))
908 /* FIXME: once we allow passing non-parm values to the split part,
909 we need to be sure to handle builtin_stack_save and
910 builtin_stack_restore correctly. At the moment we are safe; there is no
911 way to store the builtin_stack_save result in a non-SSA variable
912 since all calls to those are compiler generated. */
913 case BUILT_IN_APPLY:
914 case BUILT_IN_APPLY_ARGS:
915 case BUILT_IN_VA_START:
916 if (dump_file && (dump_flags & TDF_DETAILS))
917 fprintf (dump_file,
918 "Cannot split: builtin_apply and va_start.\n");
919 can_split = false;
920 break;
921 case BUILT_IN_EH_POINTER:
922 if (dump_file && (dump_flags & TDF_DETAILS))
923 fprintf (dump_file, "Cannot split: builtin_eh_pointer.\n");
924 can_split = false;
925 break;
926 default:
927 break;
930 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
931 bitmap_set_bit (set_ssa_names, SSA_NAME_VERSION (op));
932 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
933 bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
934 can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
935 mark_nonssa_use,
936 mark_nonssa_use,
937 mark_nonssa_use);
939 for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
940 gsi_next (&bsi))
942 gphi *stmt = bsi.phi ();
943 unsigned int i;
945 if (virtual_operand_p (gimple_phi_result (stmt)))
946 continue;
947 bitmap_set_bit (set_ssa_names,
948 SSA_NAME_VERSION (gimple_phi_result (stmt)));
949 for (i = 0; i < gimple_phi_num_args (stmt); i++)
951 tree op = gimple_phi_arg_def (stmt, i);
952 if (TREE_CODE (op) == SSA_NAME)
953 bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
955 can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
956 mark_nonssa_use,
957 mark_nonssa_use,
958 mark_nonssa_use);
960 /* Record also uses coming from PHI operand in return BB. */
961 FOR_EACH_EDGE (e, ei, bb->succs)
962 if (e->dest == return_bb)
964 for (gphi_iterator bsi = gsi_start_phis (return_bb);
965 !gsi_end_p (bsi);
966 gsi_next (&bsi))
968 gphi *stmt = bsi.phi ();
969 tree op = gimple_phi_arg_def (stmt, e->dest_idx);
971 if (virtual_operand_p (gimple_phi_result (stmt)))
972 continue;
973 if (TREE_CODE (op) == SSA_NAME)
974 bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
975 else
976 can_split &= !mark_nonssa_use (stmt, op, op, non_ssa_vars);
979 return can_split;
982 /* Stack entry for recursive DFS walk in find_split_point. */
984 class stack_entry
986 public:
987 /* Basic block we are examining. */
988 basic_block bb;
990 /* SSA names set and used by the BB and all BBs reachable
991 from it via DFS walk. */
992 bitmap set_ssa_names, used_ssa_names;
993 bitmap non_ssa_vars;
995 /* All BBs visited from this BB via DFS walk. */
996 bitmap bbs_visited;
998 /* Last examined edge in DFS walk. Since we walk unoriented graph,
999 the value is up to sum of incoming and outgoing edges of BB. */
1000 unsigned int edge_num;
1002 /* Stack entry index of earliest BB reachable from current BB
1003 or any BB visited later in DFS walk. */
1004 int earliest;
1006 /* Overall time and size of all BBs reached from this BB in DFS walk. */
1007 sreal overall_time;
1008 int overall_size;
1010 /* When false we cannot split on this BB. */
1011 bool can_split;
1015 /* Find all articulation points and call consider_split on them.
1016 OVERALL_TIME and OVERALL_SIZE are the time and size of the function.
1018 We perform the basic algorithm for finding articulation points in a graph
1019 created from the CFG by considering it to be an unoriented graph.
1021 Articulation points are discovered via a DFS walk. We keep track of the
1022 earliest basic block on the stack that is reachable via a backward edge; an
1023 articulation point is any basic block with no backward edge bypassing it.
1024 To reduce stack usage we maintain a heap-allocated stack in the STACK vector.
1025 The AUX pointer of a BB is set to the index at which it appears in the stack,
1026 or to -1 once it is visited and popped off the stack.
1028 The algorithm finds an articulation point after visiting the whole component
1029 reachable from it. This makes it convenient to collect information about
1030 the component used by consider_split. */
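/* Editorial illustration, not part of the original file: a hedged sketch of
   an articulation point in the unoriented view of a CFG.

       ENTRY
         |
         A
        / \
       B   C
        \ /
         D      <- articulation point: every path between {A, B, C}
         |         and {E, F} goes through D
        / \
       E   F

   While walking the component below D, no backward edge reaches a block
   pushed on the stack earlier than D, so when D is finished it is reported
   as an articulation point and {D, E, F} is offered to consider_split as a
   candidate split part with D as its entry block.  */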
1032 static void
1033 find_split_points (basic_block return_bb, sreal overall_time, int overall_size)
1035 stack_entry first;
1036 vec<stack_entry> stack = vNULL;
1037 basic_block bb;
1038 class split_point current;
1040 current.header_time = overall_time;
1041 current.header_size = overall_size;
1042 current.split_time = 0;
1043 current.split_size = 0;
1044 current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
1046 first.bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1047 first.edge_num = 0;
1048 first.overall_time = 0;
1049 first.overall_size = 0;
1050 first.earliest = INT_MAX;
1051 first.set_ssa_names = 0;
1052 first.used_ssa_names = 0;
1053 first.non_ssa_vars = 0;
1054 first.bbs_visited = 0;
1055 first.can_split = false;
1056 stack.safe_push (first);
1057 ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = (void *)(intptr_t)-1;
1059 while (!stack.is_empty ())
1061 stack_entry *entry = &stack.last ();
1063 /* We are walking an acyclic graph, so edge_num counts
1064 succ and pred edges together. However when considering
1065 articulation, we want to have processed everything reachable
1066 from articulation but nothing that reaches into it. */
1067 if (entry->edge_num == EDGE_COUNT (entry->bb->succs)
1068 && entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1070 int pos = stack.length ();
1071 entry->can_split &= visit_bb (entry->bb, return_bb,
1072 entry->set_ssa_names,
1073 entry->used_ssa_names,
1074 entry->non_ssa_vars);
1075 if (pos <= entry->earliest && !entry->can_split
1076 && dump_file && (dump_flags & TDF_DETAILS))
1077 fprintf (dump_file,
1078 "found articulation at bb %i but cannot split\n",
1079 entry->bb->index);
1080 if (pos <= entry->earliest && entry->can_split)
1082 if (dump_file && (dump_flags & TDF_DETAILS))
1083 fprintf (dump_file, "found articulation at bb %i\n",
1084 entry->bb->index);
1085 current.entry_bb = entry->bb;
1086 current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
1087 bitmap_and_compl (current.ssa_names_to_pass,
1088 entry->used_ssa_names, entry->set_ssa_names);
1089 current.header_time = overall_time - entry->overall_time;
1090 current.header_size = overall_size - entry->overall_size;
1091 current.split_time = entry->overall_time;
1092 current.split_size = entry->overall_size;
1093 current.split_bbs = entry->bbs_visited;
1094 consider_split (&current, entry->non_ssa_vars, return_bb);
1095 BITMAP_FREE (current.ssa_names_to_pass);
1098 /* Do actual DFS walk. */
1099 if (entry->edge_num
1100 < (EDGE_COUNT (entry->bb->succs)
1101 + EDGE_COUNT (entry->bb->preds)))
1103 edge e;
1104 basic_block dest;
1105 if (entry->edge_num < EDGE_COUNT (entry->bb->succs))
1107 e = EDGE_SUCC (entry->bb, entry->edge_num);
1108 dest = e->dest;
1110 else
1112 e = EDGE_PRED (entry->bb, entry->edge_num
1113 - EDGE_COUNT (entry->bb->succs));
1114 dest = e->src;
1117 entry->edge_num++;
1119 /* New BB to visit, push it to the stack. */
1120 if (dest != return_bb && dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1121 && !dest->aux)
1123 stack_entry new_entry;
1125 new_entry.bb = dest;
1126 new_entry.edge_num = 0;
1127 new_entry.overall_time
1128 = bb_info_vec[dest->index].time;
1129 new_entry.overall_size
1130 = bb_info_vec[dest->index].size;
1131 new_entry.earliest = INT_MAX;
1132 new_entry.set_ssa_names = BITMAP_ALLOC (NULL);
1133 new_entry.used_ssa_names = BITMAP_ALLOC (NULL);
1134 new_entry.bbs_visited = BITMAP_ALLOC (NULL);
1135 new_entry.non_ssa_vars = BITMAP_ALLOC (NULL);
1136 new_entry.can_split = true;
1137 bitmap_set_bit (new_entry.bbs_visited, dest->index);
1138 stack.safe_push (new_entry);
1139 dest->aux = (void *)(intptr_t)stack.length ();
1141 /* Back edge found, record the earliest point. */
1142 else if ((intptr_t)dest->aux > 0
1143 && (intptr_t)dest->aux < entry->earliest)
1144 entry->earliest = (intptr_t)dest->aux;
1146 /* We are done with examining the edges. Pop off the value from stack
1147 and merge stuff we accumulate during the walk. */
1148 else if (entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1150 stack_entry *prev = &stack[stack.length () - 2];
1152 entry->bb->aux = (void *)(intptr_t)-1;
1153 prev->can_split &= entry->can_split;
1154 if (prev->set_ssa_names)
1156 bitmap_ior_into (prev->set_ssa_names, entry->set_ssa_names);
1157 bitmap_ior_into (prev->used_ssa_names, entry->used_ssa_names);
1158 bitmap_ior_into (prev->bbs_visited, entry->bbs_visited);
1159 bitmap_ior_into (prev->non_ssa_vars, entry->non_ssa_vars);
1161 if (prev->earliest > entry->earliest)
1162 prev->earliest = entry->earliest;
1163 prev->overall_time += entry->overall_time;
1164 prev->overall_size += entry->overall_size;
1165 BITMAP_FREE (entry->set_ssa_names);
1166 BITMAP_FREE (entry->used_ssa_names);
1167 BITMAP_FREE (entry->bbs_visited);
1168 BITMAP_FREE (entry->non_ssa_vars);
1169 stack.pop ();
1171 else
1172 stack.pop ();
1174 ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = NULL;
1175 FOR_EACH_BB_FN (bb, cfun)
1176 bb->aux = NULL;
1177 stack.release ();
1178 BITMAP_FREE (current.ssa_names_to_pass);
1181 /* Split function at SPLIT_POINT. */
1183 static void
1184 split_function (basic_block return_bb, class split_point *split_point,
1185 bool add_tsan_func_exit)
1187 vec<tree> args_to_pass = vNULL;
1188 bitmap args_to_skip;
1189 tree parm;
1190 int num = 0;
1191 cgraph_node *node, *cur_node = cgraph_node::get (current_function_decl);
1192 basic_block call_bb;
1193 gcall *call, *tsan_func_exit_call = NULL;
1194 edge e;
1195 edge_iterator ei;
1196 tree retval = NULL, real_retval = NULL;
1197 gimple *last_stmt = NULL;
1198 unsigned int i;
1199 tree arg, ddef;
1201 if (dump_file)
1203 fprintf (dump_file, "\n\nSplitting function at:\n");
1204 dump_split_point (dump_file, split_point);
1207 if (cur_node->can_change_signature)
1208 args_to_skip = BITMAP_ALLOC (NULL);
1209 else
1210 args_to_skip = NULL;
1212 /* Collect the parameters of new function and args_to_skip bitmap. */
1213 for (parm = DECL_ARGUMENTS (current_function_decl);
1214 parm; parm = DECL_CHAIN (parm), num++)
1215 if (args_to_skip
1216 && (!is_gimple_reg (parm)
1217 || (ddef = ssa_default_def (cfun, parm)) == NULL_TREE
1218 || !bitmap_bit_p (split_point->ssa_names_to_pass,
1219 SSA_NAME_VERSION (ddef))))
1220 bitmap_set_bit (args_to_skip, num);
1221 else
1223 /* This parm might not have been used up to now, but is going to be
1224 used, hence register it. */
1225 if (is_gimple_reg (parm))
1226 arg = get_or_create_ssa_default_def (cfun, parm);
1227 else
1228 arg = parm;
1230 if (!useless_type_conversion_p (DECL_ARG_TYPE (parm), TREE_TYPE (arg)))
1231 arg = fold_convert (DECL_ARG_TYPE (parm), arg);
1232 args_to_pass.safe_push (arg);
1235 /* See if the split function will return. */
1236 bool split_part_return_p = false;
1237 FOR_EACH_EDGE (e, ei, return_bb->preds)
1239 if (bitmap_bit_p (split_point->split_bbs, e->src->index))
1240 split_part_return_p = true;
1243 /* Add return block to what will become the split function.
1244 We do not return; no return block is needed. */
1245 if (!split_part_return_p)
1247 /* We have no return block, so nothing is needed. */
1248 else if (return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1250 /* When we do not want to return a value, we need to construct a
1251 new return block with an empty return statement.
1252 FIXME: Once we are able to change the return type, we should change the
1253 function to return void instead of just outputting a function with an
1254 undefined return value. For structures this affects the quality of codegen. */
1255 else if ((retval = find_retval (return_bb))
1256 && !split_point->split_part_set_retval)
1258 bool redirected = true;
1259 basic_block new_return_bb = create_basic_block (NULL, 0, return_bb);
1260 gimple_stmt_iterator gsi = gsi_start_bb (new_return_bb);
1261 gsi_insert_after (&gsi, gimple_build_return (NULL), GSI_NEW_STMT);
1262 new_return_bb->count = profile_count::zero ();
1263 while (redirected)
1265 redirected = false;
1266 FOR_EACH_EDGE (e, ei, return_bb->preds)
1267 if (bitmap_bit_p (split_point->split_bbs, e->src->index))
1269 new_return_bb->count += e->count ();
1270 redirect_edge_and_branch (e, new_return_bb);
1271 redirected = true;
1272 break;
1275 e = make_single_succ_edge (new_return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1276 add_bb_to_loop (new_return_bb, current_loops->tree_root);
1277 bitmap_set_bit (split_point->split_bbs, new_return_bb->index);
1279 /* When we pass around the value, use existing return block. */
1280 else
1281 bitmap_set_bit (split_point->split_bbs, return_bb->index);
1283 /* If RETURN_BB has virtual operand PHIs, they must be removed and the
1284 virtual operand marked for renaming as we change the CFG in a way that
1285 tree-inline is not able to compensate for.
1287 Note this can happen whether or not we have a return value. If we have
1288 a return value, then RETURN_BB may have PHIs for real operands too. */
1289 if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
1291 bool phi_p = false;
1292 for (gphi_iterator gsi = gsi_start_phis (return_bb);
1293 !gsi_end_p (gsi);)
1295 gphi *stmt = gsi.phi ();
1296 if (!virtual_operand_p (gimple_phi_result (stmt)))
1298 gsi_next (&gsi);
1299 continue;
1301 mark_virtual_phi_result_for_renaming (stmt);
1302 remove_phi_node (&gsi, true);
1303 phi_p = true;
1305 /* In reality we have to rename the reaching definition of the
1306 virtual operand at return_bb as we will eventually release it
1307 when we remove the code region we outlined.
1308 So we have to rename all immediate virtual uses of that region
1309 if we didn't see a PHI definition yet. */
1310 /* ??? In real reality we want to set the reaching vdef of the
1311 entry of the SESE region as the vuse of the call and the reaching
1312 vdef of the exit of the SESE region as the vdef of the call. */
1313 if (!phi_p)
1314 for (gimple_stmt_iterator gsi = gsi_start_bb (return_bb);
1315 !gsi_end_p (gsi);
1316 gsi_next (&gsi))
1318 gimple *stmt = gsi_stmt (gsi);
1319 if (gimple_vuse (stmt))
1321 gimple_set_vuse (stmt, NULL_TREE);
1322 update_stmt (stmt);
1324 if (gimple_vdef (stmt))
1325 break;
1329 ipa_param_adjustments *adjustments;
1330 bool skip_return = (!split_part_return_p
1331 || !split_point->split_part_set_retval);
1332 /* TODO: Perhaps get rid of args_to_skip entirely, after we make sure the
1333 debug info generation and discrepancy avoiding works well too. */
1334 if ((args_to_skip && !bitmap_empty_p (args_to_skip))
1335 || skip_return)
1337 vec<ipa_adjusted_param, va_gc> *new_params = NULL;
1338 unsigned j;
1339 for (parm = DECL_ARGUMENTS (current_function_decl), j = 0;
1340 parm; parm = DECL_CHAIN (parm), j++)
1341 if (!args_to_skip || !bitmap_bit_p (args_to_skip, j))
1343 ipa_adjusted_param adj;
1344 memset (&adj, 0, sizeof (adj));
1345 adj.op = IPA_PARAM_OP_COPY;
1346 adj.base_index = j;
1347 adj.prev_clone_index = j;
1348 vec_safe_push (new_params, adj);
1350 adjustments = new ipa_param_adjustments (new_params, j, skip_return);
1352 else
1353 adjustments = NULL;
1355 /* Now create the actual clone. */
1356 cgraph_edge::rebuild_edges ();
1357 node = cur_node->create_version_clone_with_body
1358 (vNULL, NULL, adjustments,
1359 split_point->split_bbs, split_point->entry_bb, "part");
1360 delete adjustments;
1361 node->split_part = true;
1363 if (cur_node->same_comdat_group)
1365 /* TODO: call is versionable if we make sure that all
1366 callers are inside of a comdat group. */
1367 cur_node->calls_comdat_local = true;
1368 node->add_to_same_comdat_group (cur_node);
1372 /* Let's take a time profile for the split function. */
1373 if (cur_node->tp_first_run)
1374 node->tp_first_run = cur_node->tp_first_run + 1;
1376 /* For usual cloning it is enough to clear the builtin flag only when the
1377 signature changes. For partial inlining, however, we cannot expect part
1378 of a builtin's implementation to have the same semantics as the whole. */
1379 if (fndecl_built_in_p (node->decl))
1380 set_decl_built_in_function (node->decl, NOT_BUILT_IN, 0);
1382 /* If return_bb contains any clobbers that refer to SSA_NAMEs
1383 set in the split part, remove them. Also reset debug stmts that
1384 refer to SSA_NAMEs set in the split part. */
1385 if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
1387 gimple_stmt_iterator gsi = gsi_start_bb (return_bb);
1388 while (!gsi_end_p (gsi))
1390 tree op;
1391 ssa_op_iter iter;
1392 gimple *stmt = gsi_stmt (gsi);
1393 bool remove = false;
1394 if (gimple_clobber_p (stmt) || is_gimple_debug (stmt))
1395 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
1397 basic_block bb = gimple_bb (SSA_NAME_DEF_STMT (op));
1398 if (op != retval
1399 && bb
1400 && bb != return_bb
1401 && bitmap_bit_p (split_point->split_bbs, bb->index))
1403 if (is_gimple_debug (stmt))
1405 gimple_debug_bind_reset_value (stmt);
1406 update_stmt (stmt);
1408 else
1409 remove = true;
1410 break;
1413 if (remove)
1414 gsi_remove (&gsi, true);
1415 else
1416 gsi_next (&gsi);
1420 /* If the original function is declared inline, there is no point in issuing
1421 a warning for the non-inlinable part. */
1422 DECL_NO_INLINE_WARNING_P (node->decl) = 1;
1423 cur_node->remove_callees ();
1424 cur_node->remove_all_references ();
1425 if (!split_part_return_p)
1426 TREE_THIS_VOLATILE (node->decl) = 1;
1427 if (dump_file)
1428 dump_function_to_file (node->decl, dump_file, dump_flags);
1430 /* Create the basic block we place the call into. It is the entry basic block
1431 split after the last label. */
1432 call_bb = split_point->entry_bb;
1433 for (gimple_stmt_iterator gsi = gsi_start_bb (call_bb); !gsi_end_p (gsi);)
1434 if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
1436 last_stmt = gsi_stmt (gsi);
1437 gsi_next (&gsi);
1439 else
1440 break;
1441 call_bb->count = split_point->count;
1442 e = split_block (split_point->entry_bb, last_stmt);
1443 remove_edge (e);
1445 /* Produce the call statement. */
1446 gimple_stmt_iterator gsi = gsi_last_bb (call_bb);
1447 FOR_EACH_VEC_ELT (args_to_pass, i, arg)
1448 if (!is_gimple_val (arg))
1450 arg = force_gimple_operand_gsi (&gsi, arg, true, NULL_TREE,
1451 false, GSI_CONTINUE_LINKING);
1452 args_to_pass[i] = arg;
1454 call = gimple_build_call_vec (node->decl, args_to_pass);
1455 gimple_set_block (call, DECL_INITIAL (current_function_decl));
1456 args_to_pass.release ();
1458 /* For optimized away parameters, add on the caller side
1459 before the call
1460 DEBUG D#X => parm_Y(D)
1461 stmts and associate D#X with parm in decl_debug_args_lookup
1462 vector to say for debug info that if parameter parm had been passed,
1463 it would have value parm_Y(D). */
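/* Editorial illustration, not part of the original file: assuming parameter
   x is skipped and y is passed, the caller-side sequence is expected to look
   roughly like

     # DEBUG D#1 => x_3(D)
     foo.part.0 (y_5);

   so debuggers can still show a value for x inside the outlined part via
   the debug-args mapping even though no real argument is passed; the names
   here are made up.  */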
1464 if (args_to_skip)
1466 vec<tree, va_gc> **debug_args = NULL;
1467 unsigned i = 0, len = 0;
1468 if (MAY_HAVE_DEBUG_BIND_STMTS)
1470 debug_args = decl_debug_args_lookup (node->decl);
1471 if (debug_args)
1472 len = vec_safe_length (*debug_args);
1474 for (parm = DECL_ARGUMENTS (current_function_decl), num = 0;
1475 parm; parm = DECL_CHAIN (parm), num++)
1476 if (bitmap_bit_p (args_to_skip, num) && is_gimple_reg (parm))
1478 tree ddecl;
1479 gimple *def_temp;
1481 /* This needs to be done even without
1482 MAY_HAVE_DEBUG_BIND_STMTS, otherwise if it didn't exist
1483 before, we'd end up with different SSA_NAME_VERSIONs
1484 between -g and -g0. */
1485 arg = get_or_create_ssa_default_def (cfun, parm);
1486 if (!MAY_HAVE_DEBUG_BIND_STMTS || debug_args == NULL)
1487 continue;
1489 while (i < len && (**debug_args)[i] != DECL_ORIGIN (parm))
1490 i += 2;
1491 if (i >= len)
1492 continue;
1493 ddecl = (**debug_args)[i + 1];
1494 def_temp
1495 = gimple_build_debug_bind (ddecl, unshare_expr (arg), call);
1496 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
1498 BITMAP_FREE (args_to_skip);
1501 /* We avoid address being taken on any variable used by split part,
1502 so return slot optimization is always possible. Moreover this is
1503 required to make DECL_BY_REFERENCE work. */
1504 if (aggregate_value_p (DECL_RESULT (current_function_decl),
1505 TREE_TYPE (current_function_decl))
1506 && (!is_gimple_reg_type (TREE_TYPE (DECL_RESULT (current_function_decl)))
1507 || DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))))
1508 gimple_call_set_return_slot_opt (call, true);
1510 if (add_tsan_func_exit)
1511 tsan_func_exit_call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
1513 /* Update the return value. This is a bit tricky. When we do not return,
1514 do nothing. When we return we might need to update return_bb
1515 or produce a new return statement. */
1516 if (!split_part_return_p)
1518 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1519 if (tsan_func_exit_call)
1520 gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
1522 else
1524 e = make_single_succ_edge (call_bb, return_bb,
1525 return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
1526 ? 0 : EDGE_FALLTHRU);
1528 /* If there is a return basic block, see what value we need to store the
1529 return value into and put the call just before it. */
1530 if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
1532 real_retval = retval;
1533 if (real_retval && split_point->split_part_set_retval)
1535 gphi_iterator psi;
1537 /* See if we need new SSA_NAME for the result.
1538 When DECL_BY_REFERENCE is true, retval is actually pointer to
1539 return value and it is constant in whole function. */
1540 if (TREE_CODE (retval) == SSA_NAME
1541 && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
1543 retval = copy_ssa_name (retval, call);
1545 /* See if there is PHI defining return value. */
1546 for (psi = gsi_start_phis (return_bb);
1547 !gsi_end_p (psi); gsi_next (&psi))
1548 if (!virtual_operand_p (gimple_phi_result (psi.phi ())))
1549 break;
1551 /* When there is PHI, just update its value. */
1552 if (TREE_CODE (retval) == SSA_NAME
1553 && !gsi_end_p (psi))
1554 add_phi_arg (psi.phi (), retval, e, UNKNOWN_LOCATION);
1555 /* Otherwise update the return BB itself.
1556 find_return_bb allows at most one assignment to return value,
1557 so update first statement. */
1558 else
1560 gimple_stmt_iterator bsi;
1561 for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
1562 gsi_next (&bsi))
1563 if (greturn *return_stmt
1564 = dyn_cast <greturn *> (gsi_stmt (bsi)))
1566 gimple_return_set_retval (return_stmt, retval);
1567 break;
1569 else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
1570 && !gimple_clobber_p (gsi_stmt (bsi)))
1572 gimple_assign_set_rhs1 (gsi_stmt (bsi), retval);
1573 break;
1575 update_stmt (gsi_stmt (bsi));
1576 /* Also adjust clobbers and debug stmts in return_bb. */
1577 for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
1578 gsi_next (&bsi))
1580 gimple *stmt = gsi_stmt (bsi);
1581 if (gimple_clobber_p (stmt)
1582 || is_gimple_debug (stmt))
1584 ssa_op_iter iter;
1585 use_operand_p use_p;
1586 bool update = false;
1587 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
1588 SSA_OP_USE)
1589 if (USE_FROM_PTR (use_p) == real_retval)
1591 SET_USE (use_p, retval);
1592 update = true;
1594 if (update)
1595 update_stmt (stmt);
1600 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
1602 gimple_call_set_lhs (call, build_simple_mem_ref (retval));
1603 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1605 else
1607 tree restype;
1608 restype = TREE_TYPE (DECL_RESULT (current_function_decl));
1609 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1610 if (!useless_type_conversion_p (TREE_TYPE (retval), restype))
1612 gimple *cpy;
1613 tree tem = create_tmp_reg (restype);
1614 tem = make_ssa_name (tem, call);
1615 cpy = gimple_build_assign (retval, NOP_EXPR, tem);
1616 gsi_insert_after (&gsi, cpy, GSI_NEW_STMT);
1617 retval = tem;
1619 gimple_call_set_lhs (call, retval);
1620 update_stmt (call);
1623 else
1624 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1625 if (tsan_func_exit_call)
1626 gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
1628 /* We don't use the return block (there is either no return in the function
1629 or multiple of them), so create a new basic block with a return statement. */
1631 else
1633 greturn *ret;
1634 if (split_point->split_part_set_retval
1635 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1637 retval = DECL_RESULT (current_function_decl);
1639 /* We use temporary register to hold value when aggregate_value_p
1640 is false. Similarly for DECL_BY_REFERENCE we must avoid extra
1641 copy. */
1642 if (!aggregate_value_p (retval, TREE_TYPE (current_function_decl))
1643 && !DECL_BY_REFERENCE (retval))
1644 retval = create_tmp_reg (TREE_TYPE (retval));
1645 if (is_gimple_reg (retval))
1647 /* When returning by reference, there is only one SSA name
1648 assigned to RESULT_DECL (that is pointer to return value).
1649 Look it up or create new one if it is missing. */
1650 if (DECL_BY_REFERENCE (retval))
1651 retval = get_or_create_ssa_default_def (cfun, retval);
1652 /* Otherwise produce new SSA name for return value. */
1653 else
1654 retval = make_ssa_name (retval, call);
1656 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
1657 gimple_call_set_lhs (call, build_simple_mem_ref (retval));
1658 else
1659 gimple_call_set_lhs (call, retval);
1660 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1662 else
1664 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1665 if (retval
1666 && is_gimple_reg_type (TREE_TYPE (retval))
1667 && !is_gimple_val (retval))
1669 gassign *g
1670 = gimple_build_assign (make_ssa_name (TREE_TYPE (retval)),
1671 retval);
1672 retval = gimple_assign_lhs (g);
1673 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1676 if (tsan_func_exit_call)
1677 gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
1678 ret = gimple_build_return (retval);
1679 gsi_insert_after (&gsi, ret, GSI_NEW_STMT);
1682 free_dominance_info (CDI_DOMINATORS);
1683 free_dominance_info (CDI_POST_DOMINATORS);
1684 compute_fn_summary (node, true);
1687 /* Execute function splitting pass. */
1689 static unsigned int
1690 execute_split_functions (void)
1692 gimple_stmt_iterator bsi;
1693 basic_block bb;
1694 sreal overall_time = 0;
1695 int overall_size = 0;
1696 int todo = 0;
1697 struct cgraph_node *node = cgraph_node::get (current_function_decl);
1699 if (flags_from_decl_or_type (current_function_decl)
1700 & (ECF_NORETURN|ECF_MALLOC))
1702 if (dump_file)
1703 fprintf (dump_file, "Not splitting: noreturn/malloc function.\n");
1704 return 0;
1706 if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
1708 if (dump_file)
1709 fprintf (dump_file, "Not splitting: main function.\n");
1710 return 0;
1712 if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
1714 if (dump_file)
1715 fprintf (dump_file, "Not splitting: function is unlikely executed.\n");
1716 return 0;
1718 /* This can be relaxed; function might become inlinable after splitting
1719 away the uninlinable part. */
1720 if (ipa_fn_summaries
1721 && ipa_fn_summaries->get (node)
1722 && !ipa_fn_summaries->get (node)->inlinable)
1724 if (dump_file)
1725 fprintf (dump_file, "Not splitting: not inlinable.\n");
1726 return 0;
1728 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
1730 if (dump_file)
1731 fprintf (dump_file, "Not splitting: disregarding inline limits.\n");
1732 return 0;
1734 /* This can be relaxed; most of the versioning tests actually prevent
1735 duplication. */
1736 if (!tree_versionable_function_p (current_function_decl))
1738 if (dump_file)
1739 fprintf (dump_file, "Not splitting: not versionable.\n");
1740 return 0;
1742 /* FIXME: we could support this. */
1743 if (DECL_STRUCT_FUNCTION (current_function_decl)->static_chain_decl)
1745 if (dump_file)
1746 fprintf (dump_file, "Not splitting: nested function.\n");
1747 return 0;
1750 /* See if it makes sense to try to split.
1751 It makes sense to split if we inline, that is if we have direct calls to
1752 handle or direct calls are possibly going to appear as result of indirect
1753 inlining or LTO. Also handle -fprofile-generate as LTO to allow non-LTO
1754 training for LTO -fprofile-use build.
1756 Note that we are not completely conservative about disqualifying functions
1757 called once. It is possible that the caller is called more than once and
1758 then inlining would still benefit. */
1759 if ((!node->callers
1760 /* Local functions called once will be completely inlined most of time. */
1761 || (!node->callers->next_caller && node->local))
1762 && !node->address_taken
1763 && !node->has_aliases_p ()
1764 && (!flag_lto || !node->externally_visible))
1766 if (dump_file)
1767 fprintf (dump_file, "Not splitting: not called directly "
1768 "or called once.\n");
1769 return 0;
1772 /* FIXME: We can actually split if splitting reduces call overhead. */
1773 if (!flag_inline_small_functions
1774 && !DECL_DECLARED_INLINE_P (current_function_decl))
1776 if (dump_file)
1777 fprintf (dump_file, "Not splitting: not autoinlining and function"
1778 " is not inline.\n");
1779 return 0;
1782 if (lookup_attribute ("noinline", DECL_ATTRIBUTES (current_function_decl)))
1784 if (dump_file)
1785 fprintf (dump_file, "Not splitting: function is noinline.\n");
1786 return 0;
1788 if (lookup_attribute ("section", DECL_ATTRIBUTES (current_function_decl)))
1790 if (dump_file)
1791 fprintf (dump_file, "Not splitting: function is in user defined "
1792 "section.\n");
1793 return 0;
1796 /* We enforce splitting after loop headers when profile info is not
1797 available. */
1798 if (profile_status_for_fn (cfun) != PROFILE_READ)
1799 mark_dfs_back_edges ();
1801 /* Initialize bitmap to track forbidden calls. */
1802 forbidden_dominators = BITMAP_ALLOC (NULL);
1803 calculate_dominance_info (CDI_DOMINATORS);
1805 /* Compute local info about basic blocks and determine function size/time. */
1806 bb_info_vec.safe_grow_cleared (last_basic_block_for_fn (cfun) + 1, true);
1807 best_split_point.split_bbs = NULL;
1808 basic_block return_bb = find_return_bb ();
1809 int tsan_exit_found = -1;
1810 FOR_EACH_BB_FN (bb, cfun)
1812 sreal time = 0;
1813 int size = 0;
1814 sreal freq = bb->count.to_sreal_scale
1815 (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
1817 if (dump_file && (dump_flags & TDF_DETAILS))
1818 fprintf (dump_file, "Basic block %i\n", bb->index);
1820 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
1822 sreal this_time;
1823 int this_size;
1824 gimple *stmt = gsi_stmt (bsi);
1826 this_size = estimate_num_insns (stmt, &eni_size_weights);
1827 this_time = (sreal)estimate_num_insns (stmt, &eni_time_weights)
1828 * freq;
1829 size += this_size;
1830 time += this_time;
1831 check_forbidden_calls (stmt);
1833 if (dump_file && (dump_flags & TDF_DETAILS))
1835 fprintf (dump_file, " freq:%4.2f size:%3i time:%4.2f ",
1836 freq.to_double (), this_size, this_time.to_double ());
1837 print_gimple_stmt (dump_file, stmt, 0);
1840 if ((flag_sanitize & SANITIZE_THREAD)
1841 && gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
1843 /* We handle TSAN_FUNC_EXIT for splitting either in the
1844 return_bb, or in its immediate predecessors. */
1845 if ((bb != return_bb && !find_edge (bb, return_bb))
1846 || (tsan_exit_found != -1
1847 && tsan_exit_found != (bb != return_bb)))
1849 if (dump_file)
1850 fprintf (dump_file, "Not splitting: TSAN_FUNC_EXIT"
1851 " in unexpected basic block.\n");
1852 BITMAP_FREE (forbidden_dominators);
1853 bb_info_vec.release ();
1854 return 0;
1856 tsan_exit_found = bb != return_bb;
1859 overall_time += time;
1860 overall_size += size;
1861 bb_info_vec[bb->index].time = time;
1862 bb_info_vec[bb->index].size = size;
1864 find_split_points (return_bb, overall_time, overall_size);
1865 if (best_split_point.split_bbs)
1867 split_function (return_bb, &best_split_point, tsan_exit_found == 1);
1868 BITMAP_FREE (best_split_point.ssa_names_to_pass);
1869 BITMAP_FREE (best_split_point.split_bbs);
1870 todo = TODO_update_ssa | TODO_cleanup_cfg;
1872 BITMAP_FREE (forbidden_dominators);
1873 bb_info_vec.release ();
1874 return todo;
1877 namespace {
1879 const pass_data pass_data_split_functions =
1881 GIMPLE_PASS, /* type */
1882 "fnsplit", /* name */
1883 OPTGROUP_NONE, /* optinfo_flags */
1884 TV_IPA_FNSPLIT, /* tv_id */
1885 PROP_cfg, /* properties_required */
1886 0, /* properties_provided */
1887 0, /* properties_destroyed */
1888 0, /* todo_flags_start */
1889 0, /* todo_flags_finish */
1892 class pass_split_functions : public gimple_opt_pass
1894 public:
1895 pass_split_functions (gcc::context *ctxt)
1896 : gimple_opt_pass (pass_data_split_functions, ctxt)
1899 /* opt_pass methods: */
1900 virtual bool gate (function *);
1901 virtual unsigned int execute (function *)
1903 return execute_split_functions ();
1906 }; // class pass_split_functions
1908 bool
1909 pass_split_functions::gate (function *)
1911 /* When doing profile feedback, we want to execute the pass after profiling
1912 is read. So disable one in early optimization. */
1913 return (flag_partial_inlining
1914 && !profile_arc_flag && !flag_branch_probabilities);
1917 } // anon namespace
1919 gimple_opt_pass *
1920 make_pass_split_functions (gcc::context *ctxt)
1922 return new pass_split_functions (ctxt);
1925 /* Execute function splitting pass. */
1927 static unsigned int
1928 execute_feedback_split_functions (void)
1930 unsigned int retval = execute_split_functions ();
1931 if (retval)
1932 retval |= TODO_rebuild_cgraph_edges;
1933 return retval;
1936 namespace {
1938 const pass_data pass_data_feedback_split_functions =
1940 GIMPLE_PASS, /* type */
1941 "feedback_fnsplit", /* name */
1942 OPTGROUP_NONE, /* optinfo_flags */
1943 TV_IPA_FNSPLIT, /* tv_id */
1944 PROP_cfg, /* properties_required */
1945 0, /* properties_provided */
1946 0, /* properties_destroyed */
1947 0, /* todo_flags_start */
1948 0, /* todo_flags_finish */
1951 class pass_feedback_split_functions : public gimple_opt_pass
1953 public:
1954 pass_feedback_split_functions (gcc::context *ctxt)
1955 : gimple_opt_pass (pass_data_feedback_split_functions, ctxt)
1958 /* opt_pass methods: */
1959 virtual bool gate (function *);
1960 virtual unsigned int execute (function *)
1962 return execute_feedback_split_functions ();
1965 }; // class pass_feedback_split_functions
1967 bool
1968 pass_feedback_split_functions::gate (function *)
1970 /* We don't need to split when profiling at all, we are producing
1971 lousy code anyway. */
1972 return (flag_partial_inlining
1973 && flag_branch_probabilities);
1976 } // anon namespace
1978 gimple_opt_pass *
1979 make_pass_feedback_split_functions (gcc::context *ctxt)
1981 return new pass_feedback_split_functions (ctxt);