[official-gcc.git] / gcc / ipa-split.c
1 /* Function splitting pass
2 Copyright (C) 2010-2017 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka <jh@suse.cz>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* The purpose of this pass is to split function bodies to improve
22 inlining. I.e. for a function of the form:
24 func (...)
26 if (cheap_test)
27 something_small
28 else
29 something_big
32 Produce:
34 func.part (...)
36 something_big
39 func (...)
41 if (cheap_test)
42 something_small
43 else
44 func.part (...);
47 When func becomes inlinable and cheap_test is often true, inlining func
48 but not func.part leads to a performance improvement similar to inlining the
49 original func, while the code size growth is smaller.
51 The pass is organized in three stages:
52 1) Collect local info about basic blocks into the BB_INFO structure and
53 compute the function body's estimated size and time.
54 2) Via a DFS walk, find all possible basic blocks where we can split
55 and choose the best one.
56 3) If split point is found, split at the specified BB by creating a clone
57 and updating function to call it.
59 The decisions about which functions to split are made in
60 execute_split_functions and consider_split.
62 There are several possible future improvements for this pass including:
64 1) Splitting to break up large functions
65 2) Splitting to reduce stack frame usage
66 3) Allow the split part of the function to use values computed in the header.
67 The values need to be passed to the split function, perhaps via the same
68 interface as for nested functions or as arguments.
69 4) Support for simple rematerialization. I.e. when the split part uses a
70 value computed in the header from a function parameter in a very cheap way,
71 we can just recompute it.
72 5) Support splitting of nested functions.
73 6) Support non-SSA arguments.
74 7) There is nothing preventing us from producing multiple parts of a single
75 function when needed, or from also splitting the parts. */
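/* A minimal standalone sketch of the transformation described above.
   All names here (compute, expensive_fallback, compute_part, compute_split)
   are invented for illustration; the real clone gets a ".part" suffix at
   the cgraph level and is created by split_function below.  */

extern int expensive_fallback (int x);

/* Before splitting: too big to inline because of the cold path.  */
int
compute (int x)
{
  if (x < 16)			/* cheap_test */
    return x * 2;		/* something_small */
  return expensive_fallback (x); /* something_big */
}

/* After splitting, conceptually: the header becomes a small, hot,
   inlinable wrapper; the cold tail stays out of line.  */
int
compute_part (int x)
{
  return expensive_fallback (x);
}

int
compute_split (int x)
{
  if (x < 16)
    return x * 2;
  return compute_part (x);
}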
77 #include "config.h"
78 #include "system.h"
79 #include "coretypes.h"
80 #include "backend.h"
81 #include "rtl.h"
82 #include "tree.h"
83 #include "gimple.h"
84 #include "cfghooks.h"
85 #include "alloc-pool.h"
86 #include "tree-pass.h"
87 #include "ssa.h"
88 #include "cgraph.h"
89 #include "diagnostic.h"
90 #include "fold-const.h"
91 #include "cfganal.h"
92 #include "calls.h"
93 #include "gimplify.h"
94 #include "gimple-iterator.h"
95 #include "gimplify-me.h"
96 #include "gimple-walk.h"
97 #include "symbol-summary.h"
98 #include "ipa-prop.h"
99 #include "tree-cfg.h"
100 #include "tree-into-ssa.h"
101 #include "tree-dfa.h"
102 #include "tree-inline.h"
103 #include "params.h"
104 #include "gimple-pretty-print.h"
105 #include "ipa-fnsummary.h"
106 #include "cfgloop.h"
107 #include "tree-chkp.h"
109 /* Per basic block info. */
111 struct split_bb_info
113 unsigned int size;
114 unsigned int time;
117 static vec<split_bb_info> bb_info_vec;
119 /* Description of split point. */
121 struct split_point
123 /* Size of the partitions. */
124 unsigned int header_time, header_size, split_time, split_size;
126 /* SSA names that need to be passed into the split function. */
127 bitmap ssa_names_to_pass;
129 /* Basic block where we split (that will become the entry point of the new function). */
130 basic_block entry_bb;
132 /* Count for entering the split part.
133 This is not the count of the entry_bb, because it may be in a loop. */
134 profile_count count;
136 /* Basic blocks we are splitting away. */
137 bitmap split_bbs;
139 /* True when the return value is computed in the split part and thus it needs
140 to be returned. */
141 bool split_part_set_retval;
144 /* Best split point found. */
146 struct split_point best_split_point;
148 /* Set of basic blocks that are not allowed to dominate a split point. */
150 static bitmap forbidden_dominators;
152 static tree find_retval (basic_block return_bb);
153 static tree find_retbnd (basic_block return_bb);
155 /* Callback for walk_stmt_load_store_addr_ops. If T is a non-SSA automatic
156 variable, check whether it is present in the bitmap passed via DATA. */
158 static bool
159 test_nonssa_use (gimple *, tree t, tree, void *data)
161 t = get_base_address (t);
163 if (!t || is_gimple_reg (t))
164 return false;
166 if (TREE_CODE (t) == PARM_DECL
167 || (VAR_P (t)
168 && auto_var_in_fn_p (t, current_function_decl))
169 || TREE_CODE (t) == RESULT_DECL
170 /* Normal labels are part of the CFG and will be handled gracefully.
171 Forced labels however can be used directly by statements and
172 need to stay in one partition along with their uses. */
173 || (TREE_CODE (t) == LABEL_DECL
174 && FORCED_LABEL (t)))
175 return bitmap_bit_p ((bitmap)data, DECL_UID (t));
177 /* For DECL_BY_REFERENCE, the return value is actually a pointer. We want
178 to pretend that the value pointed to is the actual result decl. */
179 if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
180 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
181 && SSA_NAME_VAR (TREE_OPERAND (t, 0))
182 && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
183 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
184 return
185 bitmap_bit_p ((bitmap)data,
186 DECL_UID (DECL_RESULT (current_function_decl)));
188 return false;
191 /* Dump split point CURRENT. */
193 static void
194 dump_split_point (FILE * file, struct split_point *current)
196 fprintf (file,
197 "Split point at BB %i\n"
198 " header time: %i header size: %i\n"
199 " split time: %i split size: %i\n bbs: ",
200 current->entry_bb->index, current->header_time,
201 current->header_size, current->split_time, current->split_size);
202 dump_bitmap (file, current->split_bbs);
203 fprintf (file, " SSA names to pass: ");
204 dump_bitmap (file, current->ssa_names_to_pass);
207 /* Look for all BBs in the header that might lead to the split part and verify
208 that they do not define any non-SSA var used by the split part.
209 Parameters are the same as for consider_split. */
211 static bool
212 verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
213 basic_block return_bb)
215 bitmap seen = BITMAP_ALLOC (NULL);
216 vec<basic_block> worklist = vNULL;
217 edge e;
218 edge_iterator ei;
219 bool ok = true;
220 basic_block bb;
222 FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
223 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
224 && !bitmap_bit_p (current->split_bbs, e->src->index))
226 worklist.safe_push (e->src);
227 bitmap_set_bit (seen, e->src->index);
230 while (!worklist.is_empty ())
232 bb = worklist.pop ();
233 FOR_EACH_EDGE (e, ei, bb->preds)
234 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
235 && bitmap_set_bit (seen, e->src->index))
237 gcc_checking_assert (!bitmap_bit_p (current->split_bbs,
238 e->src->index));
239 worklist.safe_push (e->src);
241 for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
242 gsi_next (&bsi))
244 gimple *stmt = gsi_stmt (bsi);
245 if (is_gimple_debug (stmt))
246 continue;
247 if (walk_stmt_load_store_addr_ops
248 (stmt, non_ssa_vars, test_nonssa_use, test_nonssa_use,
249 test_nonssa_use))
251 ok = false;
252 goto done;
254 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
255 if (test_nonssa_use (stmt, gimple_label_label (label_stmt),
256 NULL_TREE, non_ssa_vars))
258 ok = false;
259 goto done;
262 for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
263 gsi_next (&bsi))
265 if (walk_stmt_load_store_addr_ops
266 (gsi_stmt (bsi), non_ssa_vars, test_nonssa_use, test_nonssa_use,
267 test_nonssa_use))
269 ok = false;
270 goto done;
273 FOR_EACH_EDGE (e, ei, bb->succs)
275 if (e->dest != return_bb)
276 continue;
277 for (gphi_iterator bsi = gsi_start_phis (return_bb);
278 !gsi_end_p (bsi);
279 gsi_next (&bsi))
281 gphi *stmt = bsi.phi ();
282 tree op = gimple_phi_arg_def (stmt, e->dest_idx);
284 if (virtual_operand_p (gimple_phi_result (stmt)))
285 continue;
286 if (TREE_CODE (op) != SSA_NAME
287 && test_nonssa_use (stmt, op, op, non_ssa_vars))
289 ok = false;
290 goto done;
296 /* Verify that the rest of the function does not define any label
297 used by the split part. */
298 FOR_EACH_BB_FN (bb, cfun)
299 if (!bitmap_bit_p (current->split_bbs, bb->index)
300 && !bitmap_bit_p (seen, bb->index))
302 gimple_stmt_iterator bsi;
303 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
304 if (glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (bsi)))
306 if (test_nonssa_use (label_stmt,
307 gimple_label_label (label_stmt),
308 NULL_TREE, non_ssa_vars))
310 ok = false;
311 goto done;
314 else
315 break;
318 done:
319 BITMAP_FREE (seen);
320 worklist.release ();
321 return ok;
324 /* If STMT is a call, check the callee against a list of forbidden
325 predicate functions. If a match is found, look for uses of the
326 call result in condition statements that compare against zero.
327 For each such use, find the block targeted by the condition
328 statement for the nonzero result, and set the bit for this block
329 in the forbidden dominators bitmap. The purpose of this is to avoid
330 selecting a split point where we are likely to lose the chance
331 to optimize away an unused function call. */
333 static void
334 check_forbidden_calls (gimple *stmt)
336 imm_use_iterator use_iter;
337 use_operand_p use_p;
338 tree lhs;
340 /* At the moment, __builtin_constant_p is the only forbidden
341 predicate function call (see PR49642). */
342 if (!gimple_call_builtin_p (stmt, BUILT_IN_CONSTANT_P))
343 return;
345 lhs = gimple_call_lhs (stmt);
347 if (!lhs || TREE_CODE (lhs) != SSA_NAME)
348 return;
350 FOR_EACH_IMM_USE_FAST (use_p, use_iter, lhs)
352 tree op1;
353 basic_block use_bb, forbidden_bb;
354 enum tree_code code;
355 edge true_edge, false_edge;
356 gcond *use_stmt;
358 use_stmt = dyn_cast <gcond *> (USE_STMT (use_p));
359 if (!use_stmt)
360 continue;
362 /* Assuming canonical form for GIMPLE_COND here, with constant
363 in second position. */
364 op1 = gimple_cond_rhs (use_stmt);
365 code = gimple_cond_code (use_stmt);
366 use_bb = gimple_bb (use_stmt);
368 extract_true_false_edges_from_block (use_bb, &true_edge, &false_edge);
370 /* We're only interested in comparisons that distinguish
371 unambiguously from zero. */
372 if (!integer_zerop (op1) || code == LE_EXPR || code == GE_EXPR)
373 continue;
375 if (code == EQ_EXPR)
376 forbidden_bb = false_edge->dest;
377 else
378 forbidden_bb = true_edge->dest;
380 bitmap_set_bit (forbidden_dominators, forbidden_bb->index);
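/* Illustrative only: hypothetical user code showing the pattern guarded
   against above (see PR49642).  The block reached on a nonzero
   __builtin_constant_p result is recorded as a forbidden dominator, so no
   split point is chosen inside the region guarded by a nonzero result,
   where the guarded code would survive as an out-of-line call instead of
   folding away after inlining.  */

extern int runtime_multiply (int x);

static inline int
maybe_fold (int x)
{
  if (__builtin_constant_p (x))
    return x * 8;		/* nonzero-result target: forbidden
				   dominator for split points.  */
  return runtime_multiply (x);
}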
384 /* If BB is dominated by any block in the forbidden dominators set,
385 return TRUE; else FALSE. */
387 static bool
388 dominated_by_forbidden (basic_block bb)
390 unsigned dom_bb;
391 bitmap_iterator bi;
393 EXECUTE_IF_SET_IN_BITMAP (forbidden_dominators, 1, dom_bb, bi)
395 if (dominated_by_p (CDI_DOMINATORS, bb,
396 BASIC_BLOCK_FOR_FN (cfun, dom_bb)))
397 return true;
400 return false;
403 /* For the given split point CURRENT and return block RETURN_BB, return 1
404 if SSA name VAL is set by the split part and 0 otherwise. */
405 static bool
406 split_part_set_ssa_name_p (tree val, struct split_point *current,
407 basic_block return_bb)
409 if (TREE_CODE (val) != SSA_NAME)
410 return false;
412 return (!SSA_NAME_IS_DEFAULT_DEF (val)
413 && (bitmap_bit_p (current->split_bbs,
414 gimple_bb (SSA_NAME_DEF_STMT (val))->index)
415 || gimple_bb (SSA_NAME_DEF_STMT (val)) == return_bb));
418 /* We found a split_point CURRENT. NON_SSA_VARS is a bitmap of all non-SSA
419 variables used and RETURN_BB is the return basic block.
420 See if we can split the function here. */
422 static void
423 consider_split (struct split_point *current, bitmap non_ssa_vars,
424 basic_block return_bb)
426 tree parm;
427 unsigned int num_args = 0;
428 unsigned int call_overhead;
429 edge e;
430 edge_iterator ei;
431 gphi_iterator bsi;
432 unsigned int i;
433 tree retval;
434 tree retbnd;
435 bool back_edge = false;
437 if (dump_file && (dump_flags & TDF_DETAILS))
438 dump_split_point (dump_file, current);
440 current->count = profile_count::zero ();
441 FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
443 if (e->flags & EDGE_DFS_BACK)
444 back_edge = true;
445 if (!bitmap_bit_p (current->split_bbs, e->src->index))
446 current->count += e->count ();
449 /* Do not split when we would end up calling the function anyway.
450 Compares are three-state; use !(...<...) to also give up when the outcome
451 is unknown. */
452 if (!(current->count
453 < (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale
454 (PARAM_VALUE (PARAM_PARTIAL_INLINING_ENTRY_PROBABILITY), 100))))
456 /* When the profile is guessed, we cannot expect it to give us a
457 realistic estimate of the likelihood of the function taking the
458 complex path. As a special case, when the tail of the function is
459 a loop, enable splitting, since inlining code that skips the loop
460 is likely a noticeable win. */
461 if (back_edge
462 && profile_status_for_fn (cfun) != PROFILE_READ
463 && current->count
464 < ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
466 if (dump_file && (dump_flags & TDF_DETAILS))
468 fprintf (dump_file,
469 " Split before loop, accepting despite low counts");
470 current->count.dump (dump_file);
471 fprintf (dump_file, " ");
472 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.dump (dump_file);
475 else
477 if (dump_file && (dump_flags & TDF_DETAILS))
478 fprintf (dump_file,
479 " Refused: incoming frequency is too large.\n");
480 return;
484 if (!current->header_size)
486 if (dump_file && (dump_flags & TDF_DETAILS))
487 fprintf (dump_file, " Refused: header empty\n");
488 return;
491 /* Verify that PHI args on entry are either virtual or that all their operands
492 incoming from the header are the same. */
493 for (bsi = gsi_start_phis (current->entry_bb); !gsi_end_p (bsi); gsi_next (&bsi))
495 gphi *stmt = bsi.phi ();
496 tree val = NULL;
498 if (virtual_operand_p (gimple_phi_result (stmt)))
499 continue;
500 for (i = 0; i < gimple_phi_num_args (stmt); i++)
502 edge e = gimple_phi_arg_edge (stmt, i);
503 if (!bitmap_bit_p (current->split_bbs, e->src->index))
505 tree edge_val = gimple_phi_arg_def (stmt, i);
506 if (val && edge_val != val)
508 if (dump_file && (dump_flags & TDF_DETAILS))
509 fprintf (dump_file,
510 " Refused: entry BB has PHI with multiple variants\n");
511 return;
513 val = edge_val;
519 /* See what argument we will pass to the split function and compute
520 call overhead. */
521 call_overhead = eni_size_weights.call_cost;
522 for (parm = DECL_ARGUMENTS (current_function_decl); parm;
523 parm = DECL_CHAIN (parm))
525 if (!is_gimple_reg (parm))
527 if (bitmap_bit_p (non_ssa_vars, DECL_UID (parm)))
529 if (dump_file && (dump_flags & TDF_DETAILS))
530 fprintf (dump_file,
531 " Refused: need to pass non-ssa param values\n");
532 return;
535 else
537 tree ddef = ssa_default_def (cfun, parm);
538 if (ddef
539 && bitmap_bit_p (current->ssa_names_to_pass,
540 SSA_NAME_VERSION (ddef)))
542 if (!VOID_TYPE_P (TREE_TYPE (parm)))
543 call_overhead += estimate_move_cost (TREE_TYPE (parm), false);
544 num_args++;
548 if (!VOID_TYPE_P (TREE_TYPE (current_function_decl)))
549 call_overhead += estimate_move_cost (TREE_TYPE (current_function_decl),
550 false);
552 if (current->split_size <= call_overhead)
554 if (dump_file && (dump_flags & TDF_DETAILS))
555 fprintf (dump_file,
556 " Refused: split size is smaller than call overhead\n");
557 return;
559 if (current->header_size + call_overhead
560 >= (unsigned int)(DECL_DECLARED_INLINE_P (current_function_decl)
561 ? MAX_INLINE_INSNS_SINGLE
562 : MAX_INLINE_INSNS_AUTO))
564 if (dump_file && (dump_flags & TDF_DETAILS))
565 fprintf (dump_file,
566 " Refused: header size is too large for inline candidate\n");
567 return;
570 /* Splitting functions brings the target out of its comdat group; this will
571 lead to code duplication if the function is reused by another unit.
572 Limit this duplication. This is consistent with the limit in tree-sra.c.
573 FIXME: with LTO we ought to be able to do better! */
574 if (DECL_ONE_ONLY (current_function_decl)
575 && current->split_size >= (unsigned int) MAX_INLINE_INSNS_AUTO)
577 if (dump_file && (dump_flags & TDF_DETAILS))
578 fprintf (dump_file,
579 " Refused: function is COMDAT and tail is too large\n");
580 return;
582 /* For comdat functions also reject very small tails; those will likely get
583 inlined back and we do not want to risk the duplication overhead.
584 FIXME: with LTO we ought to be able to do better! */
585 if (DECL_ONE_ONLY (current_function_decl)
586 && current->split_size
587 <= (unsigned int) PARAM_VALUE (PARAM_EARLY_INLINING_INSNS) / 2)
589 if (dump_file && (dump_flags & TDF_DETAILS))
590 fprintf (dump_file,
591 " Refused: function is COMDAT and tail is too small\n");
592 return;
595 /* FIXME: we can currently pass only SSA function parameters as the split
596 function's arguments. Once the parm_adjustment infrastructure is supported
597 by cloning, we can pass more than that. */
598 if (num_args != bitmap_count_bits (current->ssa_names_to_pass))
601 if (dump_file && (dump_flags & TDF_DETAILS))
602 fprintf (dump_file,
603 " Refused: need to pass non-param values\n");
604 return;
607 /* When there are non-ssa vars used in the split region, see if they
608 are used in the header region. If so, reject the split.
609 FIXME: we can use nested function support to access both. */
610 if (!bitmap_empty_p (non_ssa_vars)
611 && !verify_non_ssa_vars (current, non_ssa_vars, return_bb))
613 if (dump_file && (dump_flags & TDF_DETAILS))
614 fprintf (dump_file,
615 " Refused: split part has non-ssa uses\n");
616 return;
619 /* If the split point is dominated by a forbidden block, reject
620 the split. */
621 if (!bitmap_empty_p (forbidden_dominators)
622 && dominated_by_forbidden (current->entry_bb))
624 if (dump_file && (dump_flags & TDF_DETAILS))
625 fprintf (dump_file,
626 " Refused: split point dominated by forbidden block\n");
627 return;
630 /* See if the retval used by the return bb is computed by the header or the
631 split part. When it is computed by the split part, we need to produce a
632 return statement in the split part and add code to the header to pass it around.
634 This is a bit tricky to test:
635 1) When there is no return_bb or no return value, we always pass the
636 value around.
637 2) Invariants are always computed by the caller.
638 3) For SSA we need to look at whether the defining statement is in the header or the split part.
639 4) For non-SSA we need to look at where the var is computed. */
640 retval = find_retval (return_bb);
641 if (!retval)
643 /* If there is a return_bb with no return value in a function returning a
644 value by reference, also make the split part return void; otherwise
645 expansion would try to create a non-POD temporary, which is
646 invalid. */
647 if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
648 && DECL_RESULT (current_function_decl)
649 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
650 current->split_part_set_retval = false;
651 else
652 current->split_part_set_retval = true;
654 else if (is_gimple_min_invariant (retval))
655 current->split_part_set_retval = false;
656 /* A special case is a value returned by reference, which we record as if it
657 were non-SSA, set to the result_decl. */
658 else if (TREE_CODE (retval) == SSA_NAME
659 && SSA_NAME_VAR (retval)
660 && TREE_CODE (SSA_NAME_VAR (retval)) == RESULT_DECL
661 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
662 current->split_part_set_retval
663 = bitmap_bit_p (non_ssa_vars, DECL_UID (SSA_NAME_VAR (retval)));
664 else if (TREE_CODE (retval) == SSA_NAME)
665 current->split_part_set_retval
666 = split_part_set_ssa_name_p (retval, current, return_bb);
667 else if (TREE_CODE (retval) == PARM_DECL)
668 current->split_part_set_retval = false;
669 else if (VAR_P (retval)
670 || TREE_CODE (retval) == RESULT_DECL)
671 current->split_part_set_retval
672 = bitmap_bit_p (non_ssa_vars, DECL_UID (retval));
673 else
674 current->split_part_set_retval = true;
676 /* See if retbnd used by return bb is computed by header or split part. */
677 retbnd = find_retbnd (return_bb);
678 if (retbnd)
680 bool split_part_set_retbnd
681 = split_part_set_ssa_name_p (retbnd, current, return_bb);
683 /* If we have both return value and bounds then keep their definitions
684 in a single function. We use SSA names to link returned bounds and
685 value and therefore do not handle cases when result is passed by
686 reference (which should not be our case anyway since bounds are
687 returned for pointers only). */
688 if ((DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))
689 && current->split_part_set_retval)
690 || split_part_set_retbnd != current->split_part_set_retval)
692 if (dump_file && (dump_flags & TDF_DETAILS))
693 fprintf (dump_file,
694 " Refused: split point splits return value and bounds\n");
695 return;
699 /* split_function fixes up at most one non-virtual PHI node in return_bb,
700 for the return value. If there are other PHIs, give up. */
701 if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
703 gphi_iterator psi;
705 for (psi = gsi_start_phis (return_bb); !gsi_end_p (psi); gsi_next (&psi))
706 if (!virtual_operand_p (gimple_phi_result (psi.phi ()))
707 && !(retval
708 && current->split_part_set_retval
709 && TREE_CODE (retval) == SSA_NAME
710 && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))
711 && SSA_NAME_DEF_STMT (retval) == psi.phi ()))
713 if (dump_file && (dump_flags & TDF_DETAILS))
714 fprintf (dump_file,
715 " Refused: return bb has extra PHIs\n");
716 return;
720 if (dump_file && (dump_flags & TDF_DETAILS))
721 fprintf (dump_file, " Accepted!\n");
723 /* At the moment, choose the split point with the lowest count and, among
724 those, the one that leaves out the smallest header.
725 In the future we might reconsider this heuristic. */
726 if (!best_split_point.split_bbs
727 || best_split_point.count
728 > current->count
729 || (best_split_point.count == current->count
730 && best_split_point.split_size < current->split_size))
733 if (dump_file && (dump_flags & TDF_DETAILS))
734 fprintf (dump_file, " New best split point!\n");
735 if (best_split_point.ssa_names_to_pass)
737 BITMAP_FREE (best_split_point.ssa_names_to_pass);
738 BITMAP_FREE (best_split_point.split_bbs);
740 best_split_point = *current;
741 best_split_point.ssa_names_to_pass = BITMAP_ALLOC (NULL);
742 bitmap_copy (best_split_point.ssa_names_to_pass,
743 current->ssa_names_to_pass);
744 best_split_point.split_bbs = BITMAP_ALLOC (NULL);
745 bitmap_copy (best_split_point.split_bbs, current->split_bbs);
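/* A toy re-computation (invented numbers; the real limits come from
   MAX_INLINE_INSNS_* and the params machinery, and the real overhead also
   counts the return-value move) of the two size checks in consider_split
   above.  */

#include <assert.h>

static void
split_size_checks_sketch (void)
{
  const unsigned call_cost = 12;      /* stand-in for eni_size_weights.call_cost */
  const unsigned arg_move_cost = 1;   /* one int argument */
  const unsigned split_size = 40, header_size = 10;
  const unsigned max_inline_insns_auto = 30;

  unsigned call_overhead = call_cost + arg_move_cost;
  /* The tail must outweigh the overhead of calling it.  */
  assert (split_size > call_overhead);
  /* The header plus the call must stay small enough to inline.  */
  assert (header_size + call_overhead < max_inline_insns_auto);
}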
749 /* Return the basic block containing the RETURN statement. We allow basic blocks
750 of the form:
751 <retval> = tmp_var;
752 return <retval>
753 but return_bb cannot be more complex than this (except that for
754 -fsanitize=thread we allow a TSAN_FUNC_EXIT () internal call in there).
755 If nothing is found, return the exit block.
757 When there are multiple RETURN statements, choose one with a return value,
758 since that one is more likely shared by multiple code paths.
760 The return BB is special, because for function splitting it is the only
761 basic block that is duplicated between the header and the split part of the
762 function.
764 TODO: We might support multiple return blocks. */
766 static basic_block
767 find_return_bb (void)
769 edge e;
770 basic_block return_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
771 gimple_stmt_iterator bsi;
772 bool found_return = false;
773 tree retval = NULL_TREE;
775 if (!single_pred_p (EXIT_BLOCK_PTR_FOR_FN (cfun)))
776 return return_bb;
778 e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun));
779 for (bsi = gsi_last_bb (e->src); !gsi_end_p (bsi); gsi_prev (&bsi))
781 gimple *stmt = gsi_stmt (bsi);
782 if (gimple_code (stmt) == GIMPLE_LABEL
783 || is_gimple_debug (stmt)
784 || gimple_clobber_p (stmt))
786 else if (gimple_code (stmt) == GIMPLE_ASSIGN
787 && found_return
788 && gimple_assign_single_p (stmt)
789 && (auto_var_in_fn_p (gimple_assign_rhs1 (stmt),
790 current_function_decl)
791 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
792 && retval == gimple_assign_lhs (stmt))
794 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
796 found_return = true;
797 retval = gimple_return_retval (return_stmt);
799 /* For -fsanitize=thread, allow also TSAN_FUNC_EXIT () in the return
800 bb. */
801 else if ((flag_sanitize & SANITIZE_THREAD)
802 && gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
804 else
805 break;
807 if (gsi_end_p (bsi) && found_return)
808 return_bb = e->src;
810 return return_bb;
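/* Hypothetical user code illustrating the shape find_return_bb accepts:
   after gimplification, both returns are typically funneled through a
   single block of the form "<retval> = ...; return <retval>;", and that
   block is the one returned above.  Names are invented.  */

extern int f1 (void), f2 (void);

int
choose (int flag)
{
  if (flag)
    return f1 ();	/* both assignments to <retval> ...  */
  return f2 ();		/* ... reach one shared return block.  */
}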
813 /* Given return basic block RETURN_BB, see where return value is really
814 stored. */
815 static tree
816 find_retval (basic_block return_bb)
818 gimple_stmt_iterator bsi;
819 for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
820 if (greturn *return_stmt = dyn_cast <greturn *> (gsi_stmt (bsi)))
821 return gimple_return_retval (return_stmt);
822 else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
823 && !gimple_clobber_p (gsi_stmt (bsi)))
824 return gimple_assign_rhs1 (gsi_stmt (bsi));
825 return NULL;
828 /* Given return basic block RETURN_BB, see where return bounds are really
829 stored. */
830 static tree
831 find_retbnd (basic_block return_bb)
833 gimple_stmt_iterator bsi;
834 for (bsi = gsi_last_bb (return_bb); !gsi_end_p (bsi); gsi_prev (&bsi))
835 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
836 return gimple_return_retbnd (gsi_stmt (bsi));
837 return NULL;
840 /* Callback for walk_stmt_load_store_addr_ops. If T is a non-SSA automatic
841 variable, mark it as used in the bitmap passed via DATA.
842 Return true when access to T prevents splitting the function. */
844 static bool
845 mark_nonssa_use (gimple *, tree t, tree, void *data)
847 t = get_base_address (t);
849 if (!t || is_gimple_reg (t))
850 return false;
852 /* At present we can't pass non-SSA arguments to split function.
853 FIXME: this can be relaxed by passing references to arguments. */
854 if (TREE_CODE (t) == PARM_DECL)
856 if (dump_file && (dump_flags & TDF_DETAILS))
857 fprintf (dump_file,
858 "Cannot split: use of non-ssa function parameter.\n");
859 return true;
862 if ((VAR_P (t) && auto_var_in_fn_p (t, current_function_decl))
863 || TREE_CODE (t) == RESULT_DECL
864 || (TREE_CODE (t) == LABEL_DECL && FORCED_LABEL (t)))
865 bitmap_set_bit ((bitmap)data, DECL_UID (t));
867 /* For DECL_BY_REFERENCE, the return value is actually a pointer. We want
868 to pretend that the value pointed to is the actual result decl. */
869 if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
870 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
871 && SSA_NAME_VAR (TREE_OPERAND (t, 0))
872 && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
873 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
874 return
875 bitmap_bit_p ((bitmap)data,
876 DECL_UID (DECL_RESULT (current_function_decl)));
878 return false;
881 /* Compute local properties of basic block BB that we collect when looking for
882 split points. We look for SSA defs and store them in SET_SSA_NAMES,
883 for SSA uses and store them in USED_SSA_NAMES, and for any non-SSA automatic
884 vars used, which are stored in NON_SSA_VARS.
886 When BB has an edge to RETURN_BB, collect uses in RETURN_BB too.
888 Return false when BB contains something that prevents it from being put into
889 the split function. */
891 static bool
892 visit_bb (basic_block bb, basic_block return_bb,
893 bitmap set_ssa_names, bitmap used_ssa_names,
894 bitmap non_ssa_vars)
896 edge e;
897 edge_iterator ei;
898 bool can_split = true;
900 for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
901 gsi_next (&bsi))
903 gimple *stmt = gsi_stmt (bsi);
904 tree op;
905 ssa_op_iter iter;
906 tree decl;
908 if (is_gimple_debug (stmt))
909 continue;
911 if (gimple_clobber_p (stmt))
912 continue;
914 /* FIXME: We can split regions containing EH. We cannot, however,
915 split RESX, EH_DISPATCH and EH_POINTER referring to the same region
916 into different partitions. This would require tracking of
917 EH regions and checking in consider_split_point if they
918 are not used elsewhere. */
919 if (gimple_code (stmt) == GIMPLE_RESX)
921 if (dump_file && (dump_flags & TDF_DETAILS))
922 fprintf (dump_file, "Cannot split: resx.\n");
923 can_split = false;
925 if (gimple_code (stmt) == GIMPLE_EH_DISPATCH)
927 if (dump_file && (dump_flags & TDF_DETAILS))
928 fprintf (dump_file, "Cannot split: eh dispatch.\n");
929 can_split = false;
932 /* Check builtins that prevent splitting. */
933 if (gimple_code (stmt) == GIMPLE_CALL
934 && (decl = gimple_call_fndecl (stmt)) != NULL_TREE
935 && DECL_BUILT_IN (decl)
936 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
937 switch (DECL_FUNCTION_CODE (decl))
939 /* FIXME: once we allow passing non-parm values to the split part,
940 we need to be sure to handle builtin_stack_save and
941 builtin_stack_restore correctly. At the moment we are safe; there is
942 no way to store the builtin_stack_save result in a non-SSA variable,
943 since all calls to those are compiler generated. */
944 case BUILT_IN_APPLY:
945 case BUILT_IN_APPLY_ARGS:
946 case BUILT_IN_VA_START:
947 if (dump_file && (dump_flags & TDF_DETAILS))
948 fprintf (dump_file,
949 "Cannot split: builtin_apply and va_start.\n");
950 can_split = false;
951 break;
952 case BUILT_IN_EH_POINTER:
953 if (dump_file && (dump_flags & TDF_DETAILS))
954 fprintf (dump_file, "Cannot split: builtin_eh_pointer.\n");
955 can_split = false;
956 break;
957 default:
958 break;
961 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
962 bitmap_set_bit (set_ssa_names, SSA_NAME_VERSION (op));
963 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
964 bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
965 can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
966 mark_nonssa_use,
967 mark_nonssa_use,
968 mark_nonssa_use);
970 for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
971 gsi_next (&bsi))
973 gphi *stmt = bsi.phi ();
974 unsigned int i;
976 if (virtual_operand_p (gimple_phi_result (stmt)))
977 continue;
978 bitmap_set_bit (set_ssa_names,
979 SSA_NAME_VERSION (gimple_phi_result (stmt)));
980 for (i = 0; i < gimple_phi_num_args (stmt); i++)
982 tree op = gimple_phi_arg_def (stmt, i);
983 if (TREE_CODE (op) == SSA_NAME)
984 bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
986 can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
987 mark_nonssa_use,
988 mark_nonssa_use,
989 mark_nonssa_use);
991 /* Also record uses coming from PHI operands in the return BB. */
992 FOR_EACH_EDGE (e, ei, bb->succs)
993 if (e->dest == return_bb)
995 for (gphi_iterator bsi = gsi_start_phis (return_bb);
996 !gsi_end_p (bsi);
997 gsi_next (&bsi))
999 gphi *stmt = bsi.phi ();
1000 tree op = gimple_phi_arg_def (stmt, e->dest_idx);
1002 if (virtual_operand_p (gimple_phi_result (stmt)))
1003 continue;
1004 if (TREE_CODE (op) == SSA_NAME)
1005 bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
1006 else
1007 can_split &= !mark_nonssa_use (stmt, op, op, non_ssa_vars);
1010 return can_split;
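/* Hypothetical source-level illustration of what the bitmaps collected by
   visit_bb feed into: if the split point is placed at the loop below, t is
   defined in the header and used in the split part, so its SSA name appears
   in used_ssa_names but not in the region's set_ssa_names, and it must be
   passed to the ".part" clone as an argument (see ssa_names_to_pass in
   find_split_points).  All names are invented.  */

extern int step (int acc, int t);

int
accumulate (int n)
{
  int t = n * 3;		/* defined in the header */
  int acc = 0;
  if (n < 4)
    return t;			/* cheap path stays in the header */
  for (int i = 0; i < n; i++)	/* candidate split part starts here */
    acc = step (acc, t + i);	/* use of t: value must be passed */
  return acc;
}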
1013 /* Stack entry for recursive DFS walk in find_split_point. */
1015 struct stack_entry
1017 /* Basic block we are examining. */
1018 basic_block bb;
1020 /* SSA names set and used by the BB and all BBs reachable
1021 from it via DFS walk. */
1022 bitmap set_ssa_names, used_ssa_names;
1023 bitmap non_ssa_vars;
1025 /* All BBs visited from this BB via DFS walk. */
1026 bitmap bbs_visited;
1028 /* Last examined edge in DFS walk. Since we walk an unoriented graph,
1029 the value can be up to the sum of incoming and outgoing edges of BB. */
1030 unsigned int edge_num;
1032 /* Stack entry index of earliest BB reachable from current BB
1033 or any BB visited later in DFS walk. */
1034 int earliest;
1036 /* Overall time and size of all BBs reached from this BB in DFS walk. */
1037 int overall_time, overall_size;
1039 /* When false we cannot split on this BB. */
1040 bool can_split;
1044 /* Find all articulations and call consider_split on them.
1045 OVERALL_TIME and OVERALL_SIZE are the time and size of the function.
1047 We perform the basic algorithm for finding articulations in a graph
1048 created from the CFG by considering it to be an unoriented graph.
1050 Articulations are discovered via a DFS walk. We collect the earliest
1051 basic block on the stack that is reachable via a backward edge. An
1052 articulation is any basic block such that there is no backward edge
1053 bypassing it. To reduce stack usage we maintain a heap-allocated stack in
1054 the STACK vector. The AUX pointer of a BB is set to the index at which it
1055 appears in the stack, or to -1 once it is visited and popped off the stack.
1057 The algorithm finds an articulation only after visiting the whole component
1058 reachable from it. This makes it convenient to collect information about
1059 the component used by consider_split; a standalone sketch follows below. */
1061 static void
1062 find_split_points (basic_block return_bb, int overall_time, int overall_size)
1064 stack_entry first;
1065 vec<stack_entry> stack = vNULL;
1066 basic_block bb;
1067 struct split_point current;
1069 current.header_time = overall_time;
1070 current.header_size = overall_size;
1071 current.split_time = 0;
1072 current.split_size = 0;
1073 current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
1075 first.bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1076 first.edge_num = 0;
1077 first.overall_time = 0;
1078 first.overall_size = 0;
1079 first.earliest = INT_MAX;
1080 first.set_ssa_names = 0;
1081 first.used_ssa_names = 0;
1082 first.non_ssa_vars = 0;
1083 first.bbs_visited = 0;
1084 first.can_split = false;
1085 stack.safe_push (first);
1086 ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = (void *)(intptr_t)-1;
1088 while (!stack.is_empty ())
1090 stack_entry *entry = &stack.last ();
1092 /* We are walking an acyclic graph, so edge_num counts
1093 succ and pred edges together. However when considering
1094 articulation, we want to have processed everything reachable
1095 from articulation but nothing that reaches into it. */
1096 if (entry->edge_num == EDGE_COUNT (entry->bb->succs)
1097 && entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1099 int pos = stack.length ();
1100 entry->can_split &= visit_bb (entry->bb, return_bb,
1101 entry->set_ssa_names,
1102 entry->used_ssa_names,
1103 entry->non_ssa_vars);
1104 if (pos <= entry->earliest && !entry->can_split
1105 && dump_file && (dump_flags & TDF_DETAILS))
1106 fprintf (dump_file,
1107 "found articulation at bb %i but can not split\n",
1108 entry->bb->index);
1109 if (pos <= entry->earliest && entry->can_split)
1111 if (dump_file && (dump_flags & TDF_DETAILS))
1112 fprintf (dump_file, "found articulation at bb %i\n",
1113 entry->bb->index);
1114 current.entry_bb = entry->bb;
1115 current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
1116 bitmap_and_compl (current.ssa_names_to_pass,
1117 entry->used_ssa_names, entry->set_ssa_names);
1118 current.header_time = overall_time - entry->overall_time;
1119 current.header_size = overall_size - entry->overall_size;
1120 current.split_time = entry->overall_time;
1121 current.split_size = entry->overall_size;
1122 current.split_bbs = entry->bbs_visited;
1123 consider_split (&current, entry->non_ssa_vars, return_bb);
1124 BITMAP_FREE (current.ssa_names_to_pass);
1127 /* Do actual DFS walk. */
1128 if (entry->edge_num
1129 < (EDGE_COUNT (entry->bb->succs)
1130 + EDGE_COUNT (entry->bb->preds)))
1132 edge e;
1133 basic_block dest;
1134 if (entry->edge_num < EDGE_COUNT (entry->bb->succs))
1136 e = EDGE_SUCC (entry->bb, entry->edge_num);
1137 dest = e->dest;
1139 else
1141 e = EDGE_PRED (entry->bb, entry->edge_num
1142 - EDGE_COUNT (entry->bb->succs));
1143 dest = e->src;
1146 entry->edge_num++;
1148 /* New BB to visit, push it to the stack. */
1149 if (dest != return_bb && dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1150 && !dest->aux)
1152 stack_entry new_entry;
1154 new_entry.bb = dest;
1155 new_entry.edge_num = 0;
1156 new_entry.overall_time
1157 = bb_info_vec[dest->index].time;
1158 new_entry.overall_size
1159 = bb_info_vec[dest->index].size;
1160 new_entry.earliest = INT_MAX;
1161 new_entry.set_ssa_names = BITMAP_ALLOC (NULL);
1162 new_entry.used_ssa_names = BITMAP_ALLOC (NULL);
1163 new_entry.bbs_visited = BITMAP_ALLOC (NULL);
1164 new_entry.non_ssa_vars = BITMAP_ALLOC (NULL);
1165 new_entry.can_split = true;
1166 bitmap_set_bit (new_entry.bbs_visited, dest->index);
1167 stack.safe_push (new_entry);
1168 dest->aux = (void *)(intptr_t)stack.length ();
1170 /* Back edge found, record the earliest point. */
1171 else if ((intptr_t)dest->aux > 0
1172 && (intptr_t)dest->aux < entry->earliest)
1173 entry->earliest = (intptr_t)dest->aux;
1175 /* We are done with examining the edges. Pop off the value from the stack
1176 and merge the stuff we accumulated during the walk. */
1177 else if (entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1179 stack_entry *prev = &stack[stack.length () - 2];
1181 entry->bb->aux = (void *)(intptr_t)-1;
1182 prev->can_split &= entry->can_split;
1183 if (prev->set_ssa_names)
1185 bitmap_ior_into (prev->set_ssa_names, entry->set_ssa_names);
1186 bitmap_ior_into (prev->used_ssa_names, entry->used_ssa_names);
1187 bitmap_ior_into (prev->bbs_visited, entry->bbs_visited);
1188 bitmap_ior_into (prev->non_ssa_vars, entry->non_ssa_vars);
1190 if (prev->earliest > entry->earliest)
1191 prev->earliest = entry->earliest;
1192 prev->overall_time += entry->overall_time;
1193 prev->overall_size += entry->overall_size;
1194 BITMAP_FREE (entry->set_ssa_names);
1195 BITMAP_FREE (entry->used_ssa_names);
1196 BITMAP_FREE (entry->bbs_visited);
1197 BITMAP_FREE (entry->non_ssa_vars);
1198 stack.pop ();
1200 else
1201 stack.pop ();
1203 ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = NULL;
1204 FOR_EACH_BB_FN (bb, cfun)
1205 bb->aux = NULL;
1206 stack.release ();
1207 BITMAP_FREE (current.ssa_names_to_pass);
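/* A self-contained sketch (invented example graph, plain C; not GCC
   internals) of the articulation search described in the comment above
   find_split_points: a DFS over an unoriented graph that tracks, per
   vertex, the earliest discovery index reachable via a back edge.  A
   non-root vertex V is an articulation when some DFS child cannot reach
   above V.  */

#include <stdio.h>

#define N 6
static const int adj[N][N] = {
  /* Two triangles 0-1-2 and 3-4-5 joined by the bridge 2-3.  */
  {0,1,1,0,0,0}, {1,0,1,0,0,0}, {1,1,0,1,0,0},
  {0,0,1,0,1,1}, {0,0,0,1,0,1}, {0,0,0,1,1,0},
};

static int disc[N], low[N], timer;

static void
dfs (int v, int parent)
{
  disc[v] = low[v] = ++timer;
  for (int u = 0; u < N; u++)
    if (adj[v][u])
      {
	if (!disc[u])
	  {
	    dfs (u, v);
	    if (low[u] < low[v])
	      low[v] = low[u];
	    /* No back edge from u's subtree climbs above v.  */
	    if (low[u] >= disc[v] && parent != -1)
	      printf ("articulation at vertex %d\n", v);
	  }
	else if (u != parent && disc[u] < low[v])
	  low[v] = disc[u];	/* Back edge: record the earliest point.  */
      }
}

int
main (void)
{
  dfs (0, -1);	/* Prints vertices 3 and 2, the endpoints of the bridge.  */
  return 0;
}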
1210 /* Split function at SPLIT_POINT. */
1212 static void
1213 split_function (basic_block return_bb, struct split_point *split_point,
1214 bool add_tsan_func_exit)
1216 vec<tree> args_to_pass = vNULL;
1217 bitmap args_to_skip;
1218 tree parm;
1219 int num = 0;
1220 cgraph_node *node, *cur_node = cgraph_node::get (current_function_decl);
1221 basic_block call_bb;
1222 gcall *call, *tsan_func_exit_call = NULL;
1223 edge e;
1224 edge_iterator ei;
1225 tree retval = NULL, real_retval = NULL, retbnd = NULL;
1226 bool with_bounds = chkp_function_instrumented_p (current_function_decl);
1227 gimple *last_stmt = NULL;
1228 unsigned int i;
1229 tree arg, ddef;
1231 if (dump_file)
1233 fprintf (dump_file, "\n\nSplitting function at:\n");
1234 dump_split_point (dump_file, split_point);
1237 if (cur_node->local.can_change_signature)
1238 args_to_skip = BITMAP_ALLOC (NULL);
1239 else
1240 args_to_skip = NULL;
1242 /* Collect the parameters of new function and args_to_skip bitmap. */
1243 for (parm = DECL_ARGUMENTS (current_function_decl);
1244 parm; parm = DECL_CHAIN (parm), num++)
1245 if (args_to_skip
1246 && (!is_gimple_reg (parm)
1247 || (ddef = ssa_default_def (cfun, parm)) == NULL_TREE
1248 || !bitmap_bit_p (split_point->ssa_names_to_pass,
1249 SSA_NAME_VERSION (ddef))))
1250 bitmap_set_bit (args_to_skip, num);
1251 else
1253 /* This parm might not have been used up to now, but is going to be
1254 used, hence register it. */
1255 if (is_gimple_reg (parm))
1256 arg = get_or_create_ssa_default_def (cfun, parm);
1257 else
1258 arg = parm;
1260 if (!useless_type_conversion_p (DECL_ARG_TYPE (parm), TREE_TYPE (arg)))
1261 arg = fold_convert (DECL_ARG_TYPE (parm), arg);
1262 args_to_pass.safe_push (arg);
1265 /* See if the split function will return. */
1266 bool split_part_return_p = false;
1267 FOR_EACH_EDGE (e, ei, return_bb->preds)
1269 if (bitmap_bit_p (split_point->split_bbs, e->src->index))
1270 split_part_return_p = true;
1273 /* Add return block to what will become the split function.
1274 We do not return; no return block is needed. */
1275 if (!split_part_return_p)
1277 /* We have no return block, so nothing is needed. */
1278 else if (return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1280 /* When we do not want to return a value, we need to construct a
1281 new return block with an empty return statement.
1282 FIXME: Once we are able to change the return type, we should change the
1283 function to return void instead of just outputting it with an undefined
1284 return value. For structures this affects the quality of codegen. */
1285 else if ((retval = find_retval (return_bb))
1286 && !split_point->split_part_set_retval)
1288 bool redirected = true;
1289 basic_block new_return_bb = create_basic_block (NULL, 0, return_bb);
1290 gimple_stmt_iterator gsi = gsi_start_bb (new_return_bb);
1291 gsi_insert_after (&gsi, gimple_build_return (NULL), GSI_NEW_STMT);
1292 new_return_bb->count = profile_count::zero ();
1293 while (redirected)
1295 redirected = false;
1296 FOR_EACH_EDGE (e, ei, return_bb->preds)
1297 if (bitmap_bit_p (split_point->split_bbs, e->src->index))
1299 new_return_bb->count += e->count ();
1300 redirect_edge_and_branch (e, new_return_bb);
1301 redirected = true;
1302 break;
1305 e = make_single_succ_edge (new_return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1306 add_bb_to_loop (new_return_bb, current_loops->tree_root);
1307 bitmap_set_bit (split_point->split_bbs, new_return_bb->index);
1308 retbnd = find_retbnd (return_bb);
1310 /* When we pass around the value, use existing return block. */
1311 else
1313 bitmap_set_bit (split_point->split_bbs, return_bb->index);
1314 retbnd = find_retbnd (return_bb);
1317 /* If RETURN_BB has virtual operand PHIs, they must be removed and the
1318 virtual operand marked for renaming as we change the CFG in a way that
1319 tree-inline is not able to compensate for.
1321 Note this can happen whether or not we have a return value. If we have
1322 a return value, then RETURN_BB may have PHIs for real operands too. */
1323 if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
1325 bool phi_p = false;
1326 for (gphi_iterator gsi = gsi_start_phis (return_bb);
1327 !gsi_end_p (gsi);)
1329 gphi *stmt = gsi.phi ();
1330 if (!virtual_operand_p (gimple_phi_result (stmt)))
1332 gsi_next (&gsi);
1333 continue;
1335 mark_virtual_phi_result_for_renaming (stmt);
1336 remove_phi_node (&gsi, true);
1337 phi_p = true;
1339 /* In reality we have to rename the reaching definition of the
1340 virtual operand at return_bb as we will eventually release it
1341 when we remove the code region we outlined.
1342 So we have to rename all immediate virtual uses of that region
1343 if we didn't see a PHI definition yet. */
1344 /* ??? In real reality we want to set the reaching vdef of the
1345 entry of the SESE region as the vuse of the call and the reaching
1346 vdef of the exit of the SESE region as the vdef of the call. */
1347 if (!phi_p)
1348 for (gimple_stmt_iterator gsi = gsi_start_bb (return_bb);
1349 !gsi_end_p (gsi);
1350 gsi_next (&gsi))
1352 gimple *stmt = gsi_stmt (gsi);
1353 if (gimple_vuse (stmt))
1355 gimple_set_vuse (stmt, NULL_TREE);
1356 update_stmt (stmt);
1358 if (gimple_vdef (stmt))
1359 break;
1363 /* Now create the actual clone. */
1364 cgraph_edge::rebuild_edges ();
1365 node = cur_node->create_version_clone_with_body
1366 (vNULL, NULL, args_to_skip,
1367 !split_part_return_p || !split_point->split_part_set_retval,
1368 split_point->split_bbs, split_point->entry_bb, "part");
1370 node->split_part = true;
1372 if (cur_node->same_comdat_group)
1374 /* TODO: call is versionable if we make sure that all
1375 callers are inside of a comdat group. */
1376 cur_node->calls_comdat_local = 1;
1377 node->add_to_same_comdat_group (cur_node);
1381 /* Let's take a time profile for the split function. */
1382 node->tp_first_run = cur_node->tp_first_run + 1;
1384 /* For usual cloning it is enough to clear the builtin only when the
1385 signature changes. For partial inlining, however, we cannot expect the
1386 part of the builtin implementation to have the same semantics as the whole. */
1387 if (DECL_BUILT_IN (node->decl))
1389 DECL_BUILT_IN_CLASS (node->decl) = NOT_BUILT_IN;
1390 DECL_FUNCTION_CODE (node->decl) = (enum built_in_function) 0;
1393 /* If return_bb contains any clobbers that refer to SSA_NAMEs
1394 set in the split part, remove them. Also reset debug stmts that
1395 refer to SSA_NAMEs set in the split part. */
1396 if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
1398 gimple_stmt_iterator gsi = gsi_start_bb (return_bb);
1399 while (!gsi_end_p (gsi))
1401 tree op;
1402 ssa_op_iter iter;
1403 gimple *stmt = gsi_stmt (gsi);
1404 bool remove = false;
1405 if (gimple_clobber_p (stmt) || is_gimple_debug (stmt))
1406 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
1408 basic_block bb = gimple_bb (SSA_NAME_DEF_STMT (op));
1409 if (op != retval
1410 && bb
1411 && bb != return_bb
1412 && bitmap_bit_p (split_point->split_bbs, bb->index))
1414 if (is_gimple_debug (stmt))
1416 gimple_debug_bind_reset_value (stmt);
1417 update_stmt (stmt);
1419 else
1420 remove = true;
1421 break;
1424 if (remove)
1425 gsi_remove (&gsi, true);
1426 else
1427 gsi_next (&gsi);
1431 /* If the original function is instrumented, then its
1432 part is also instrumented. */
1433 if (with_bounds)
1434 chkp_function_mark_instrumented (node->decl);
1436 /* If the original function is declared inline, there is no point in issuing
1437 a warning for the non-inlinable part. */
1438 DECL_NO_INLINE_WARNING_P (node->decl) = 1;
1439 cur_node->remove_callees ();
1440 cur_node->remove_all_references ();
1441 if (!split_part_return_p)
1442 TREE_THIS_VOLATILE (node->decl) = 1;
1443 if (dump_file)
1444 dump_function_to_file (node->decl, dump_file, dump_flags);
1446 /* Create the basic block we place the call into. It is the entry basic
1447 block, split after the last label. */
1448 call_bb = split_point->entry_bb;
1449 for (gimple_stmt_iterator gsi = gsi_start_bb (call_bb); !gsi_end_p (gsi);)
1450 if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
1452 last_stmt = gsi_stmt (gsi);
1453 gsi_next (&gsi);
1455 else
1456 break;
1457 call_bb->count = split_point->count;
1458 e = split_block (split_point->entry_bb, last_stmt);
1459 remove_edge (e);
1461 /* Produce the call statement. */
1462 gimple_stmt_iterator gsi = gsi_last_bb (call_bb);
1463 FOR_EACH_VEC_ELT (args_to_pass, i, arg)
1464 if (!is_gimple_val (arg))
1466 arg = force_gimple_operand_gsi (&gsi, arg, true, NULL_TREE,
1467 false, GSI_CONTINUE_LINKING);
1468 args_to_pass[i] = arg;
1470 call = gimple_build_call_vec (node->decl, args_to_pass);
1471 gimple_call_set_with_bounds (call, with_bounds);
1472 gimple_set_block (call, DECL_INITIAL (current_function_decl));
1473 args_to_pass.release ();
1475 /* For optimized away parameters, add on the caller side
1476 before the call
1477 DEBUG D#X => parm_Y(D)
1478 stmts and associate D#X with parm in decl_debug_args_lookup
1479 vector to say for debug info that if parameter parm had been passed,
1480 it would have value parm_Y(D). */
1481 if (args_to_skip)
1483 vec<tree, va_gc> **debug_args = NULL;
1484 unsigned i = 0, len = 0;
1485 if (MAY_HAVE_DEBUG_STMTS)
1487 debug_args = decl_debug_args_lookup (node->decl);
1488 if (debug_args)
1489 len = vec_safe_length (*debug_args);
1491 for (parm = DECL_ARGUMENTS (current_function_decl), num = 0;
1492 parm; parm = DECL_CHAIN (parm), num++)
1493 if (bitmap_bit_p (args_to_skip, num) && is_gimple_reg (parm))
1495 tree ddecl;
1496 gimple *def_temp;
1498 /* This needs to be done even without MAY_HAVE_DEBUG_STMTS,
1499 otherwise if it didn't exist before, we'd end up with
1500 different SSA_NAME_VERSIONs between -g and -g0. */
1501 arg = get_or_create_ssa_default_def (cfun, parm);
1502 if (!MAY_HAVE_DEBUG_STMTS || debug_args == NULL)
1503 continue;
1505 while (i < len && (**debug_args)[i] != DECL_ORIGIN (parm))
1506 i += 2;
1507 if (i >= len)
1508 continue;
1509 ddecl = (**debug_args)[i + 1];
1510 def_temp
1511 = gimple_build_debug_bind (ddecl, unshare_expr (arg), call);
1512 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
1516 /* We avoid the address being taken on any variable used by the split part,
1517 so return slot optimization is always possible. Moreover this is
1518 required to make DECL_BY_REFERENCE work. */
1519 if (aggregate_value_p (DECL_RESULT (current_function_decl),
1520 TREE_TYPE (current_function_decl))
1521 && (!is_gimple_reg_type (TREE_TYPE (DECL_RESULT (current_function_decl)))
1522 || DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))))
1523 gimple_call_set_return_slot_opt (call, true);
1525 if (add_tsan_func_exit)
1526 tsan_func_exit_call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
1528 /* Update the return value. This is a bit tricky. When we do not return,
1529 do nothing. When we return, we might need to update return_bb
1530 or produce a new return statement. */
1531 if (!split_part_return_p)
1533 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1534 if (tsan_func_exit_call)
1535 gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
1537 else
1539 e = make_single_succ_edge (call_bb, return_bb,
1540 return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
1541 ? 0 : EDGE_FALLTHRU);
1543 /* If there is a return basic block, see what value we need to store the
1544 return value into and put the call just before it. */
1545 if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
1547 real_retval = retval;
1548 if (real_retval && split_point->split_part_set_retval)
1550 gphi_iterator psi;
1552 /* See if we need a new SSA_NAME for the result.
1553 When DECL_BY_REFERENCE is true, retval is actually a pointer to the
1554 return value and it is constant in the whole function. */
1555 if (TREE_CODE (retval) == SSA_NAME
1556 && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
1558 retval = copy_ssa_name (retval, call);
1560 /* See if there is PHI defining return value. */
1561 for (psi = gsi_start_phis (return_bb);
1562 !gsi_end_p (psi); gsi_next (&psi))
1563 if (!virtual_operand_p (gimple_phi_result (psi.phi ())))
1564 break;
1566 /* When there is PHI, just update its value. */
1567 if (TREE_CODE (retval) == SSA_NAME
1568 && !gsi_end_p (psi))
1569 add_phi_arg (psi.phi (), retval, e, UNKNOWN_LOCATION);
1570 /* Otherwise update the return BB itself.
1571 find_return_bb allows at most one assignment to return value,
1572 so update first statement. */
1573 else
1575 gimple_stmt_iterator bsi;
1576 for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
1577 gsi_next (&bsi))
1578 if (greturn *return_stmt
1579 = dyn_cast <greturn *> (gsi_stmt (bsi)))
1581 gimple_return_set_retval (return_stmt, retval);
1582 break;
1584 else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
1585 && !gimple_clobber_p (gsi_stmt (bsi)))
1587 gimple_assign_set_rhs1 (gsi_stmt (bsi), retval);
1588 break;
1590 update_stmt (gsi_stmt (bsi));
1591 /* Also adjust clobbers and debug stmts in return_bb. */
1592 for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
1593 gsi_next (&bsi))
1595 gimple *stmt = gsi_stmt (bsi);
1596 if (gimple_clobber_p (stmt)
1597 || is_gimple_debug (stmt))
1599 ssa_op_iter iter;
1600 use_operand_p use_p;
1601 bool update = false;
1602 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
1603 SSA_OP_USE)
1604 if (USE_FROM_PTR (use_p) == real_retval)
1606 SET_USE (use_p, retval);
1607 update = true;
1609 if (update)
1610 update_stmt (stmt);
1615 /* Replace retbnd with new one. */
1616 if (retbnd)
1618 gimple_stmt_iterator bsi;
1619 for (bsi = gsi_last_bb (return_bb); !gsi_end_p (bsi);
1620 gsi_prev (&bsi))
1621 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
1623 retbnd = copy_ssa_name (retbnd, call);
1624 gimple_return_set_retbnd (gsi_stmt (bsi), retbnd);
1625 update_stmt (gsi_stmt (bsi));
1626 break;
1630 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
1632 gimple_call_set_lhs (call, build_simple_mem_ref (retval));
1633 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1635 else
1637 tree restype;
1638 restype = TREE_TYPE (DECL_RESULT (current_function_decl));
1639 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1640 if (!useless_type_conversion_p (TREE_TYPE (retval), restype))
1642 gimple *cpy;
1643 tree tem = create_tmp_reg (restype);
1644 tem = make_ssa_name (tem, call);
1645 cpy = gimple_build_assign (retval, NOP_EXPR, tem);
1646 gsi_insert_after (&gsi, cpy, GSI_NEW_STMT);
1647 retval = tem;
1649 /* Build bndret call to obtain returned bounds. */
1650 if (retbnd)
1651 chkp_insert_retbnd_call (retbnd, retval, &gsi);
1652 gimple_call_set_lhs (call, retval);
1653 update_stmt (call);
1656 else
1657 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1658 if (tsan_func_exit_call)
1659 gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
1661 /* We don't use the return block (there is either no return in the function
1662 or multiple of them). So create a new basic block with a return statement. */
1664 else
1666 greturn *ret;
1667 if (split_point->split_part_set_retval
1668 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1670 retval = DECL_RESULT (current_function_decl);
1672 if (chkp_function_instrumented_p (current_function_decl)
1673 && BOUNDED_P (retval))
1674 retbnd = create_tmp_reg (pointer_bounds_type_node);
1676 /* We use a temporary register to hold the value when aggregate_value_p
1677 is false. Similarly for DECL_BY_REFERENCE we must avoid an extra
1678 copy. */
1679 if (!aggregate_value_p (retval, TREE_TYPE (current_function_decl))
1680 && !DECL_BY_REFERENCE (retval))
1681 retval = create_tmp_reg (TREE_TYPE (retval));
1682 if (is_gimple_reg (retval))
1684 /* When returning by reference, there is only one SSA name
1685 assigned to RESULT_DECL (that is, the pointer to the return value).
1686 Look it up or create a new one if it is missing. */
1687 if (DECL_BY_REFERENCE (retval))
1688 retval = get_or_create_ssa_default_def (cfun, retval);
1689 /* Otherwise produce new SSA name for return value. */
1690 else
1691 retval = make_ssa_name (retval, call);
1693 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
1694 gimple_call_set_lhs (call, build_simple_mem_ref (retval));
1695 else
1696 gimple_call_set_lhs (call, retval);
1697 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1699 else
1701 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1702 if (retval
1703 && is_gimple_reg_type (TREE_TYPE (retval))
1704 && !is_gimple_val (retval))
1706 gassign *g
1707 = gimple_build_assign (make_ssa_name (TREE_TYPE (retval)),
1708 retval);
1709 retval = gimple_assign_lhs (g);
1710 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1713 /* Build bndret call to obtain returned bounds. */
1714 if (retbnd)
1715 chkp_insert_retbnd_call (retbnd, retval, &gsi);
1716 if (tsan_func_exit_call)
1717 gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
1718 ret = gimple_build_return (retval);
1719 gsi_insert_after (&gsi, ret, GSI_NEW_STMT);
1722 free_dominance_info (CDI_DOMINATORS);
1723 free_dominance_info (CDI_POST_DOMINATORS);
1724 compute_fn_summary (node, true);
1727 /* Execute function splitting pass. */
1729 static unsigned int
1730 execute_split_functions (void)
1732 gimple_stmt_iterator bsi;
1733 basic_block bb;
1734 int overall_time = 0, overall_size = 0;
1735 int todo = 0;
1736 struct cgraph_node *node = cgraph_node::get (current_function_decl);
1738 if (flags_from_decl_or_type (current_function_decl)
1739 & (ECF_NORETURN|ECF_MALLOC))
1741 if (dump_file)
1742 fprintf (dump_file, "Not splitting: noreturn/malloc function.\n");
1743 return 0;
1745 if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
1747 if (dump_file)
1748 fprintf (dump_file, "Not splitting: main function.\n");
1749 return 0;
1751 /* This can be relaxed; function might become inlinable after splitting
1752 away the uninlinable part. */
1753 if (ipa_fn_summaries
1754 && !ipa_fn_summaries->get (node)->inlinable)
1756 if (dump_file)
1757 fprintf (dump_file, "Not splitting: not inlinable.\n");
1758 return 0;
1760 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
1762 if (dump_file)
1763 fprintf (dump_file, "Not splitting: disregarding inline limits.\n");
1764 return 0;
1766 /* This can be relaxed; most of the versioning tests actually prevent
1767 duplication. */
1768 if (!tree_versionable_function_p (current_function_decl))
1770 if (dump_file)
1771 fprintf (dump_file, "Not splitting: not versionable.\n");
1772 return 0;
1774 /* FIXME: we could support this. */
1775 if (DECL_STRUCT_FUNCTION (current_function_decl)->static_chain_decl)
1777 if (dump_file)
1778 fprintf (dump_file, "Not splitting: nested function.\n");
1779 return 0;
1782 /* See if it makes sense to try to split.
1783 It makes sense to split if we inline, that is, if we have direct calls to
1784 handle or direct calls are possibly going to appear as a result of indirect
1785 inlining or LTO. Also handle -fprofile-generate as LTO to allow non-LTO
1786 training for an LTO -fprofile-use build.
1788 Note that we are not completely conservative about disqualifying functions
1789 called once. It is possible that the caller is called more than once and
1790 then inlining would still benefit. */
1791 if ((!node->callers
1792 /* Local functions called once will be completely inlined most of the time. */
1793 || (!node->callers->next_caller && node->local.local))
1794 && !node->address_taken
1795 && !node->has_aliases_p ()
1796 && (!flag_lto || !node->externally_visible))
1798 if (dump_file)
1799 fprintf (dump_file, "Not splitting: not called directly "
1800 "or called once.\n");
1801 return 0;
1804 /* FIXME: We can actually split if splitting reduces call overhead. */
1805 if (!flag_inline_small_functions
1806 && !DECL_DECLARED_INLINE_P (current_function_decl))
1808 if (dump_file)
1809 fprintf (dump_file, "Not splitting: not autoinlining and function"
1810 " is not inline.\n");
1811 return 0;
1814 /* We enforce splitting after loop headers when profile info is not
1815 available. */
1816 if (profile_status_for_fn (cfun) != PROFILE_READ)
1817 mark_dfs_back_edges ();
1819 /* Initialize bitmap to track forbidden calls. */
1820 forbidden_dominators = BITMAP_ALLOC (NULL);
1821 calculate_dominance_info (CDI_DOMINATORS);
1823 /* Compute local info about basic blocks and determine function size/time. */
1824 bb_info_vec.safe_grow_cleared (last_basic_block_for_fn (cfun) + 1);
1825 memset (&best_split_point, 0, sizeof (best_split_point));
1826 basic_block return_bb = find_return_bb ();
1827 int tsan_exit_found = -1;
1828 FOR_EACH_BB_FN (bb, cfun)
1830 int time = 0;
1831 int size = 0;
1832 int freq = compute_call_stmt_bb_frequency (current_function_decl, bb);
1834 if (dump_file && (dump_flags & TDF_DETAILS))
1835 fprintf (dump_file, "Basic block %i\n", bb->index);
1837 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
1839 int this_time, this_size;
1840 gimple *stmt = gsi_stmt (bsi);
1842 this_size = estimate_num_insns (stmt, &eni_size_weights);
1843 this_time = estimate_num_insns (stmt, &eni_time_weights) * freq;
1844 size += this_size;
1845 time += this_time;
1846 check_forbidden_calls (stmt);
1848 if (dump_file && (dump_flags & TDF_DETAILS))
1850 fprintf (dump_file, " freq:%6i size:%3i time:%3i ",
1851 freq, this_size, this_time);
1852 print_gimple_stmt (dump_file, stmt, 0);
1855 if ((flag_sanitize & SANITIZE_THREAD)
1856 && gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
1858 /* We handle TSAN_FUNC_EXIT for splitting either in the
1859 return_bb, or in its immediate predecessors. */
1860 if ((bb != return_bb && !find_edge (bb, return_bb))
1861 || (tsan_exit_found != -1
1862 && tsan_exit_found != (bb != return_bb)))
1864 if (dump_file)
1865 fprintf (dump_file, "Not splitting: TSAN_FUNC_EXIT"
1866 " in unexpected basic block.\n");
1867 BITMAP_FREE (forbidden_dominators);
1868 bb_info_vec.release ();
1869 return 0;
1871 tsan_exit_found = bb != return_bb;
1874 overall_time += time;
1875 overall_size += size;
1876 bb_info_vec[bb->index].time = time;
1877 bb_info_vec[bb->index].size = size;
1879 find_split_points (return_bb, overall_time, overall_size);
1880 if (best_split_point.split_bbs)
1882 split_function (return_bb, &best_split_point, tsan_exit_found == 1);
1883 BITMAP_FREE (best_split_point.ssa_names_to_pass);
1884 BITMAP_FREE (best_split_point.split_bbs);
1885 todo = TODO_update_ssa | TODO_cleanup_cfg;
1887 BITMAP_FREE (forbidden_dominators);
1888 bb_info_vec.release ();
1889 return todo;
1892 namespace {
1894 const pass_data pass_data_split_functions =
1896 GIMPLE_PASS, /* type */
1897 "fnsplit", /* name */
1898 OPTGROUP_NONE, /* optinfo_flags */
1899 TV_IPA_FNSPLIT, /* tv_id */
1900 PROP_cfg, /* properties_required */
1901 0, /* properties_provided */
1902 0, /* properties_destroyed */
1903 0, /* todo_flags_start */
1904 0, /* todo_flags_finish */
1907 class pass_split_functions : public gimple_opt_pass
1909 public:
1910 pass_split_functions (gcc::context *ctxt)
1911 : gimple_opt_pass (pass_data_split_functions, ctxt)
1914 /* opt_pass methods: */
1915 virtual bool gate (function *);
1916 virtual unsigned int execute (function *)
1918 return execute_split_functions ();
1921 }; // class pass_split_functions
1923 bool
1924 pass_split_functions::gate (function *)
1926 /* When doing profile feedback, we want to execute the pass after profiling
1927 is read. So disable this one in early optimization. */
1928 return (flag_partial_inlining
1929 && !profile_arc_flag && !flag_branch_probabilities);
1932 } // anon namespace
1934 gimple_opt_pass *
1935 make_pass_split_functions (gcc::context *ctxt)
1937 return new pass_split_functions (ctxt);
1940 /* Execute function splitting pass. */
1942 static unsigned int
1943 execute_feedback_split_functions (void)
1945 unsigned int retval = execute_split_functions ();
1946 if (retval)
1947 retval |= TODO_rebuild_cgraph_edges;
1948 return retval;
1951 namespace {
1953 const pass_data pass_data_feedback_split_functions =
1955 GIMPLE_PASS, /* type */
1956 "feedback_fnsplit", /* name */
1957 OPTGROUP_NONE, /* optinfo_flags */
1958 TV_IPA_FNSPLIT, /* tv_id */
1959 PROP_cfg, /* properties_required */
1960 0, /* properties_provided */
1961 0, /* properties_destroyed */
1962 0, /* todo_flags_start */
1963 0, /* todo_flags_finish */
1966 class pass_feedback_split_functions : public gimple_opt_pass
1968 public:
1969 pass_feedback_split_functions (gcc::context *ctxt)
1970 : gimple_opt_pass (pass_data_feedback_split_functions, ctxt)
1973 /* opt_pass methods: */
1974 virtual bool gate (function *);
1975 virtual unsigned int execute (function *)
1977 return execute_feedback_split_functions ();
1980 }; // class pass_feedback_split_functions
1982 bool
1983 pass_feedback_split_functions::gate (function *)
1985 /* We don't need to split when profiling at all; we are producing
1986 lousy code anyway. */
1987 return (flag_partial_inlining
1988 && flag_branch_probabilities);
1991 } // anon namespace
1993 gimple_opt_pass *
1994 make_pass_feedback_split_functions (gcc::context *ctxt)
1996 return new pass_feedback_split_functions (ctxt);