1 /* Function splitting pass
2 Copyright (C) 2010-2016 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka <jh@suse.cz>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* The purpose of this pass is to split function bodies to improve
22   inlining.  I.e. for a function of the form:
24 func (...)
26 if (cheap_test)
27 something_small
28 else
29 something_big
32 Produce:
34 func.part (...)
36 something_big
39 func (...)
41 if (cheap_test)
42 something_small
43 else
44 func.part (...);
47   When func becomes inlinable and when cheap_test is often true, inlining func,
48   but not func.part, leads to a performance improvement similar to inlining the
49   original func, while the code size growth is smaller.
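
   As an illustrative, made-up example of a function with this shape (the
   names below are invented for exposition and do not come from this file
   or from any testcase):

     static int last_key, last_value;

     int
     lookup (int key)
     {
       if (key == last_key)
         return last_value;
       last_value = 0;
       for (int i = 0; i < key; i++)
         last_value += i * i;
       last_key = key;
       return last_value;
     }

   Here the comparison against last_key plays the role of cheap_test, the
   early return is something_small, and the loop is the something_big that
   would be outlined into lookup.part, so callers that usually hit the
   cached case can inline the cheap test without paying for the slow path.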
51 The pass is organized in three stages:
52   1) Collect local info about basic blocks into the BB_INFO structure and
53      compute the estimated size and time of the function body.
54   2) Via a DFS walk find all possible basic blocks where we can split
55      and choose the best one.
56   3) If a split point is found, split at the specified BB by creating a clone
57      and updating the function to call it.
59   The decisions about which functions to split are made in execute_split_functions
60   and consider_split.
62 There are several possible future improvements for this pass including:
64 1) Splitting to break up large functions
65 2) Splitting to reduce stack frame usage
66 3) Allow split part of function to use values computed in the header part.
67      The values need to be passed to the split function, perhaps via the same
68      interface as for nested functions or as arguments.
69   4) Support for simple rematerialization.  I.e. when the split part uses a
70      value that the header computes from a function parameter in a very cheap
71      way, we can just recompute it.
72 5) Support splitting of nested functions.
73 6) Support non-SSA arguments.
74   7) There is nothing preventing us from producing multiple parts of a single
75      function when needed, or from splitting the parts further.  */
77 #include "config.h"
78 #include "system.h"
79 #include "coretypes.h"
80 #include "backend.h"
81 #include "rtl.h"
82 #include "tree.h"
83 #include "gimple.h"
84 #include "cfghooks.h"
85 #include "alloc-pool.h"
86 #include "tree-pass.h"
87 #include "ssa.h"
88 #include "cgraph.h"
89 #include "diagnostic.h"
90 #include "fold-const.h"
91 #include "cfganal.h"
92 #include "calls.h"
93 #include "gimplify.h"
94 #include "gimple-iterator.h"
95 #include "gimplify-me.h"
96 #include "gimple-walk.h"
97 #include "symbol-summary.h"
98 #include "ipa-prop.h"
99 #include "tree-cfg.h"
100 #include "tree-into-ssa.h"
101 #include "tree-dfa.h"
102 #include "tree-inline.h"
103 #include "params.h"
104 #include "gimple-pretty-print.h"
105 #include "ipa-inline.h"
106 #include "cfgloop.h"
107 #include "tree-chkp.h"
109 /* Per basic block info. */
111 struct split_bb_info
113 unsigned int size;
114 unsigned int time;
117 static vec<split_bb_info> bb_info_vec;
119 /* Description of split point. */
121 struct split_point
123 /* Size of the partitions. */
124 unsigned int header_time, header_size, split_time, split_size;
126   /* SSA names that need to be passed into the split function.  */
127 bitmap ssa_names_to_pass;
129   /* Basic block where we split (that will become the entry point of the new function).  */
130 basic_block entry_bb;
132 /* Basic blocks we are splitting away. */
133 bitmap split_bbs;
135 /* True when return value is computed on split part and thus it needs
136 to be returned. */
137 bool split_part_set_retval;
140 /* Best split point found. */
142 struct split_point best_split_point;
144 /* Set of basic blocks that are not allowed to dominate a split point. */
146 static bitmap forbidden_dominators;
148 static tree find_retval (basic_block return_bb);
149 static tree find_retbnd (basic_block return_bb);
151 /* Callback for walk_stmt_load_store_addr_ops.  If T is a non-SSA automatic
152    variable, check whether it is present in the bitmap passed via DATA.  */
154 static bool
155 test_nonssa_use (gimple *, tree t, tree, void *data)
157 t = get_base_address (t);
159 if (!t || is_gimple_reg (t))
160 return false;
162 if (TREE_CODE (t) == PARM_DECL
163 || (TREE_CODE (t) == VAR_DECL
164 && auto_var_in_fn_p (t, current_function_decl))
165 || TREE_CODE (t) == RESULT_DECL
166       /* Normal labels are part of the CFG and will be handled gracefully.
167 Forced labels however can be used directly by statements and
168 need to stay in one partition along with their uses. */
169 || (TREE_CODE (t) == LABEL_DECL
170 && FORCED_LABEL (t)))
171 return bitmap_bit_p ((bitmap)data, DECL_UID (t));
173 /* For DECL_BY_REFERENCE, the return value is actually a pointer. We want
174      to pretend that the value pointed to is the actual result decl.  */
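  /* This arises, for example, for a return value passed through a hidden
     reference.  An illustrative C++ sketch (invented, not from any testcase):

	struct S { S (const S &); int x; };
	S make (void);

     make returns S through a hidden reference, so the stores that fill in
     the result go through a MEM_REF of the pointer held in the RESULT_DECL
     and must be treated as setting the result decl itself, which is what
     the test below does.  */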
175 if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
176 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
177 && SSA_NAME_VAR (TREE_OPERAND (t, 0))
178 && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
179 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
180 return
181 bitmap_bit_p ((bitmap)data,
182 DECL_UID (DECL_RESULT (current_function_decl)));
184 return false;
187 /* Dump split point CURRENT. */
189 static void
190 dump_split_point (FILE * file, struct split_point *current)
192 fprintf (file,
193 "Split point at BB %i\n"
194 " header time: %i header size: %i\n"
195 " split time: %i split size: %i\n bbs: ",
196 current->entry_bb->index, current->header_time,
197 current->header_size, current->split_time, current->split_size);
198 dump_bitmap (file, current->split_bbs);
199 fprintf (file, " SSA names to pass: ");
200 dump_bitmap (file, current->ssa_names_to_pass);
203 /* Look for all BBs in header that might lead to the split part and verify
204 that they are not defining any non-SSA var used by the split part.
205 Parameters are the same as for consider_split. */
207 static bool
208 verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
209 basic_block return_bb)
211 bitmap seen = BITMAP_ALLOC (NULL);
212 vec<basic_block> worklist = vNULL;
213 edge e;
214 edge_iterator ei;
215 bool ok = true;
216 basic_block bb;
218 FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
219 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
220 && !bitmap_bit_p (current->split_bbs, e->src->index))
222 worklist.safe_push (e->src);
223 bitmap_set_bit (seen, e->src->index);
226 while (!worklist.is_empty ())
228 bb = worklist.pop ();
229 FOR_EACH_EDGE (e, ei, bb->preds)
230 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
231 && bitmap_set_bit (seen, e->src->index))
233 gcc_checking_assert (!bitmap_bit_p (current->split_bbs,
234 e->src->index));
235 worklist.safe_push (e->src);
237 for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
238 gsi_next (&bsi))
240 gimple *stmt = gsi_stmt (bsi);
241 if (is_gimple_debug (stmt))
242 continue;
243 if (walk_stmt_load_store_addr_ops
244 (stmt, non_ssa_vars, test_nonssa_use, test_nonssa_use,
245 test_nonssa_use))
247 ok = false;
248 goto done;
250 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
251 if (test_nonssa_use (stmt, gimple_label_label (label_stmt),
252 NULL_TREE, non_ssa_vars))
254 ok = false;
255 goto done;
258 for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
259 gsi_next (&bsi))
261 if (walk_stmt_load_store_addr_ops
262 (gsi_stmt (bsi), non_ssa_vars, test_nonssa_use, test_nonssa_use,
263 test_nonssa_use))
265 ok = false;
266 goto done;
269 FOR_EACH_EDGE (e, ei, bb->succs)
271 if (e->dest != return_bb)
272 continue;
273 for (gphi_iterator bsi = gsi_start_phis (return_bb);
274 !gsi_end_p (bsi);
275 gsi_next (&bsi))
277 gphi *stmt = bsi.phi ();
278 tree op = gimple_phi_arg_def (stmt, e->dest_idx);
280 if (virtual_operand_p (gimple_phi_result (stmt)))
281 continue;
282 if (TREE_CODE (op) != SSA_NAME
283 && test_nonssa_use (stmt, op, op, non_ssa_vars))
285 ok = false;
286 goto done;
292   /* Verify that the rest of the function does not define any label
293 used by the split part. */
294 FOR_EACH_BB_FN (bb, cfun)
295 if (!bitmap_bit_p (current->split_bbs, bb->index)
296 && !bitmap_bit_p (seen, bb->index))
298 gimple_stmt_iterator bsi;
299 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
300 if (glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (bsi)))
302 if (test_nonssa_use (label_stmt,
303 gimple_label_label (label_stmt),
304 NULL_TREE, non_ssa_vars))
306 ok = false;
307 goto done;
310 else
311 break;
314 done:
315 BITMAP_FREE (seen);
316 worklist.release ();
317 return ok;
320 /* If STMT is a call, check the callee against a list of forbidden
321 predicate functions. If a match is found, look for uses of the
322 call result in condition statements that compare against zero.
323 For each such use, find the block targeted by the condition
324 statement for the nonzero result, and set the bit for this block
325 in the forbidden dominators bitmap. The purpose of this is to avoid
326 selecting a split point where we are likely to lose the chance
327 to optimize away an unused function call. */
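/* For instance, consider an illustrative, made-up caller idiom (the helper
   names are placeholders):

	if (__builtin_constant_p (n))
	  use_constant_path (n);
	else
	  use_generic_path (n);

   The block guarded by the nonzero result is written in the expectation
   that, once the function is inlined with a constant argument, the guard
   and the guarded code fold away at compile time.  If a split point were
   dominated by that block, the guarded code would be outlined into an
   opaque call and that opportunity would be lost (cf. PR49642).  */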
329 static void
330 check_forbidden_calls (gimple *stmt)
332 imm_use_iterator use_iter;
333 use_operand_p use_p;
334 tree lhs;
336 /* At the moment, __builtin_constant_p is the only forbidden
337 predicate function call (see PR49642). */
338 if (!gimple_call_builtin_p (stmt, BUILT_IN_CONSTANT_P))
339 return;
341 lhs = gimple_call_lhs (stmt);
343 if (!lhs || TREE_CODE (lhs) != SSA_NAME)
344 return;
346 FOR_EACH_IMM_USE_FAST (use_p, use_iter, lhs)
348 tree op1;
349 basic_block use_bb, forbidden_bb;
350 enum tree_code code;
351 edge true_edge, false_edge;
352 gcond *use_stmt;
354 use_stmt = dyn_cast <gcond *> (USE_STMT (use_p));
355 if (!use_stmt)
356 continue;
358 /* Assuming canonical form for GIMPLE_COND here, with constant
359 in second position. */
360 op1 = gimple_cond_rhs (use_stmt);
361 code = gimple_cond_code (use_stmt);
362 use_bb = gimple_bb (use_stmt);
364 extract_true_false_edges_from_block (use_bb, &true_edge, &false_edge);
366 /* We're only interested in comparisons that distinguish
367 unambiguously from zero. */
368 if (!integer_zerop (op1) || code == LE_EXPR || code == GE_EXPR)
369 continue;
371 if (code == EQ_EXPR)
372 forbidden_bb = false_edge->dest;
373 else
374 forbidden_bb = true_edge->dest;
376 bitmap_set_bit (forbidden_dominators, forbidden_bb->index);
380 /* If BB is dominated by any block in the forbidden dominators set,
381 return TRUE; else FALSE. */
383 static bool
384 dominated_by_forbidden (basic_block bb)
386 unsigned dom_bb;
387 bitmap_iterator bi;
389 EXECUTE_IF_SET_IN_BITMAP (forbidden_dominators, 1, dom_bb, bi)
391 if (dominated_by_p (CDI_DOMINATORS, bb,
392 BASIC_BLOCK_FOR_FN (cfun, dom_bb)))
393 return true;
396 return false;
399 /* For a given split point CURRENT and return block RETURN_BB, return true
400    if SSA name VAL is set by the split part and false otherwise.  */
401 static bool
402 split_part_set_ssa_name_p (tree val, struct split_point *current,
403 basic_block return_bb)
405 if (TREE_CODE (val) != SSA_NAME)
406 return false;
408 return (!SSA_NAME_IS_DEFAULT_DEF (val)
409 && (bitmap_bit_p (current->split_bbs,
410 gimple_bb (SSA_NAME_DEF_STMT (val))->index)
411 || gimple_bb (SSA_NAME_DEF_STMT (val)) == return_bb));
414 /* We found a split_point CURRENT.  NON_SSA_VARS is a bitmap of all non-SSA
415    variables used and RETURN_BB is the return basic block.
416    See if we can split the function here.  */
418 static void
419 consider_split (struct split_point *current, bitmap non_ssa_vars,
420 basic_block return_bb)
422 tree parm;
423 unsigned int num_args = 0;
424 unsigned int call_overhead;
425 edge e;
426 edge_iterator ei;
427 gphi_iterator bsi;
428 unsigned int i;
429 int incoming_freq = 0;
430 tree retval;
431 tree retbnd;
432 bool back_edge = false;
434 if (dump_file && (dump_flags & TDF_DETAILS))
435 dump_split_point (dump_file, current);
437 FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
439 if (e->flags & EDGE_DFS_BACK)
440 back_edge = true;
441 if (!bitmap_bit_p (current->split_bbs, e->src->index))
442 incoming_freq += EDGE_FREQUENCY (e);
445   /* Do not split when we would end up calling the function anyway.  */
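  /* As an illustrative calculation, with PARAM_PARTIAL_INLINING_ENTRY_PROBABILITY
     at its usual default of 70: if the entry block has frequency 1000 and the
     header reaches this split point with a combined frequency of 800, then
     800 >= 1000 * 70 / 100 holds and the candidate is rejected, because most
     invocations would make the call into the split part anyway.  */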
446 if (incoming_freq
447 >= (ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
448 * PARAM_VALUE (PARAM_PARTIAL_INLINING_ENTRY_PROBABILITY) / 100))
450       /* When the profile is guessed, we cannot expect it to give us a
451 	 realistic estimate of the likelihood of the function taking the
452 	 complex path.  As a special case, when the tail of the function is
453 	 a loop, enable splitting since inlining code skipping the loop
454 	 is likely a noticeable win.  */
455 if (back_edge
456 && profile_status_for_fn (cfun) != PROFILE_READ
457 && incoming_freq < ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency)
459 if (dump_file && (dump_flags & TDF_DETAILS))
460 fprintf (dump_file,
461 " Split before loop, accepting despite low frequencies %i %i.\n",
462 incoming_freq,
463 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency);
465 else
467 if (dump_file && (dump_flags & TDF_DETAILS))
468 fprintf (dump_file,
469 " Refused: incoming frequency is too large.\n");
470 return;
474 if (!current->header_size)
476 if (dump_file && (dump_flags & TDF_DETAILS))
477 fprintf (dump_file, " Refused: header empty\n");
478 return;
481 /* Verify that PHI args on entry are either virtual or all their operands
482 incoming from header are the same. */
483 for (bsi = gsi_start_phis (current->entry_bb); !gsi_end_p (bsi); gsi_next (&bsi))
485 gphi *stmt = bsi.phi ();
486 tree val = NULL;
488 if (virtual_operand_p (gimple_phi_result (stmt)))
489 continue;
490 for (i = 0; i < gimple_phi_num_args (stmt); i++)
492 edge e = gimple_phi_arg_edge (stmt, i);
493 if (!bitmap_bit_p (current->split_bbs, e->src->index))
495 tree edge_val = gimple_phi_arg_def (stmt, i);
496 if (val && edge_val != val)
498 if (dump_file && (dump_flags & TDF_DETAILS))
499 fprintf (dump_file,
500 " Refused: entry BB has PHI with multiple variants\n");
501 return;
503 val = edge_val;
509 /* See what argument we will pass to the split function and compute
510 call overhead. */
511 call_overhead = eni_size_weights.call_cost;
512 for (parm = DECL_ARGUMENTS (current_function_decl); parm;
513 parm = DECL_CHAIN (parm))
515 if (!is_gimple_reg (parm))
517 if (bitmap_bit_p (non_ssa_vars, DECL_UID (parm)))
519 if (dump_file && (dump_flags & TDF_DETAILS))
520 fprintf (dump_file,
521 " Refused: need to pass non-ssa param values\n");
522 return;
525 else
527 tree ddef = ssa_default_def (cfun, parm);
528 if (ddef
529 && bitmap_bit_p (current->ssa_names_to_pass,
530 SSA_NAME_VERSION (ddef)))
532 if (!VOID_TYPE_P (TREE_TYPE (parm)))
533 call_overhead += estimate_move_cost (TREE_TYPE (parm), false);
534 num_args++;
538 if (!VOID_TYPE_P (TREE_TYPE (current_function_decl)))
539 call_overhead += estimate_move_cost (TREE_TYPE (current_function_decl),
540 false);
542 if (current->split_size <= call_overhead)
544 if (dump_file && (dump_flags & TDF_DETAILS))
545 fprintf (dump_file,
546 " Refused: split size is smaller than call overhead\n");
547 return;
549 if (current->header_size + call_overhead
550 >= (unsigned int)(DECL_DECLARED_INLINE_P (current_function_decl)
551 ? MAX_INLINE_INSNS_SINGLE
552 : MAX_INLINE_INSNS_AUTO))
554 if (dump_file && (dump_flags & TDF_DETAILS))
555 fprintf (dump_file,
556 " Refused: header size is too large for inline candidate\n");
557 return;
560   /* Splitting functions brings the target out of its comdat group; this will
561      lead to code duplication if the function is reused by another unit.
562      Limit this duplication.  This is consistent with the limit in tree-sra.c.
563 FIXME: with LTO we ought to be able to do better! */
564 if (DECL_ONE_ONLY (current_function_decl)
565 && current->split_size >= (unsigned int) MAX_INLINE_INSNS_AUTO)
567 if (dump_file && (dump_flags & TDF_DETAILS))
568 fprintf (dump_file,
569 " Refused: function is COMDAT and tail is too large\n");
570 return;
572 /* For comdat functions also reject very small tails; those will likely get
573 inlined back and we do not want to risk the duplication overhead.
574 FIXME: with LTO we ought to be able to do better! */
575 if (DECL_ONE_ONLY (current_function_decl)
576 && current->split_size
577 <= (unsigned int) PARAM_VALUE (PARAM_EARLY_INLINING_INSNS) / 2)
579 if (dump_file && (dump_flags & TDF_DETAILS))
580 fprintf (dump_file,
581 " Refused: function is COMDAT and tail is too small\n");
582 return;
585 /* FIXME: we currently can pass only SSA function parameters to the split
586 arguments. Once parm_adjustment infrastructure is supported by cloning,
587 we can pass more than that. */
588 if (num_args != bitmap_count_bits (current->ssa_names_to_pass))
591 if (dump_file && (dump_flags & TDF_DETAILS))
592 fprintf (dump_file,
593 " Refused: need to pass non-param values\n");
594 return;
597 /* When there are non-ssa vars used in the split region, see if they
598 are used in the header region. If so, reject the split.
599 FIXME: we can use nested function support to access both. */
600 if (!bitmap_empty_p (non_ssa_vars)
601 && !verify_non_ssa_vars (current, non_ssa_vars, return_bb))
603 if (dump_file && (dump_flags & TDF_DETAILS))
604 fprintf (dump_file,
605 " Refused: split part has non-ssa uses\n");
606 return;
609 /* If the split point is dominated by a forbidden block, reject
610 the split. */
611 if (!bitmap_empty_p (forbidden_dominators)
612 && dominated_by_forbidden (current->entry_bb))
614 if (dump_file && (dump_flags & TDF_DETAILS))
615 fprintf (dump_file,
616 " Refused: split point dominated by forbidden block\n");
617 return;
620   /* See if the retval used by the return bb is computed by the header or the split part.
621      When it is computed by the split part, we need to produce a return statement
622      in the split part and add code to the header to pass the value around.
624      This is a bit tricky to test:
625        1) When there is no return_bb or no return value, we always pass the
626 	  value around.
627        2) Invariants are always computed by the caller.
628        3) For SSA we need to look whether the defining statement is in the header or the split part.
629        4) For non-SSA we need to look where the var is computed.  (See the illustrative example below.)  */
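  /* As an invented illustration (not from any testcase; "expensive" is a
     placeholder helper): for

	int
	f (int x)
	{
	  if (x > 0)
	    return 0;
	  return expensive (x);
	}

     with the expensive branch split away, the value flowing into the return
     is partly defined in the split part, so the split part has to return it
     and the header has to forward it.  When every incoming value is computed
     in the header or is invariant, the header keeps the return to itself.  */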
630 retval = find_retval (return_bb);
631 if (!retval)
633       /* If there is a return_bb with no return value in a function returning
634 	 a value by reference, also make the split part return void, otherwise
635 	 the expansion would try to create a non-POD temporary, which is
636 	 invalid.  */
637 if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
638 && DECL_RESULT (current_function_decl)
639 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
640 current->split_part_set_retval = false;
641 else
642 current->split_part_set_retval = true;
644 else if (is_gimple_min_invariant (retval))
645 current->split_part_set_retval = false;
646   /* A special case is a value returned by reference, which we record as if it
647      were a non-SSA variable set to the result_decl.  */
648 else if (TREE_CODE (retval) == SSA_NAME
649 && SSA_NAME_VAR (retval)
650 && TREE_CODE (SSA_NAME_VAR (retval)) == RESULT_DECL
651 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
652 current->split_part_set_retval
653 = bitmap_bit_p (non_ssa_vars, DECL_UID (SSA_NAME_VAR (retval)));
654 else if (TREE_CODE (retval) == SSA_NAME)
655 current->split_part_set_retval
656 = split_part_set_ssa_name_p (retval, current, return_bb);
657 else if (TREE_CODE (retval) == PARM_DECL)
658 current->split_part_set_retval = false;
659 else if (TREE_CODE (retval) == VAR_DECL
660 || TREE_CODE (retval) == RESULT_DECL)
661 current->split_part_set_retval
662 = bitmap_bit_p (non_ssa_vars, DECL_UID (retval));
663 else
664 current->split_part_set_retval = true;
666 /* See if retbnd used by return bb is computed by header or split part. */
667 retbnd = find_retbnd (return_bb);
668 if (retbnd)
670 bool split_part_set_retbnd
671 = split_part_set_ssa_name_p (retbnd, current, return_bb);
673 /* If we have both return value and bounds then keep their definitions
674 in a single function. We use SSA names to link returned bounds and
675 value and therefore do not handle cases when result is passed by
676 reference (which should not be our case anyway since bounds are
677 returned for pointers only). */
678 if ((DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))
679 && current->split_part_set_retval)
680 || split_part_set_retbnd != current->split_part_set_retval)
682 if (dump_file && (dump_flags & TDF_DETAILS))
683 fprintf (dump_file,
684 " Refused: split point splits return value and bounds\n");
685 return;
689   /* split_function fixes up at most one non-virtual PHI node in return_bb,
690 for the return value. If there are other PHIs, give up. */
691 if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
693 gphi_iterator psi;
695 for (psi = gsi_start_phis (return_bb); !gsi_end_p (psi); gsi_next (&psi))
696 if (!virtual_operand_p (gimple_phi_result (psi.phi ()))
697 && !(retval
698 && current->split_part_set_retval
699 && TREE_CODE (retval) == SSA_NAME
700 && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))
701 && SSA_NAME_DEF_STMT (retval) == psi.phi ()))
703 if (dump_file && (dump_flags & TDF_DETAILS))
704 fprintf (dump_file,
705 " Refused: return bb has extra PHIs\n");
706 return;
710 if (dump_file && (dump_flags & TDF_DETAILS))
711 fprintf (dump_file, " Accepted!\n");
713   /* At the moment choose the split point with the lowest frequency that leaves
714      the smallest header.
715      In the future we might reconsider this heuristic.  */
716 if (!best_split_point.split_bbs
717 || best_split_point.entry_bb->frequency > current->entry_bb->frequency
718 || (best_split_point.entry_bb->frequency == current->entry_bb->frequency
719 && best_split_point.split_size < current->split_size))
722 if (dump_file && (dump_flags & TDF_DETAILS))
723 fprintf (dump_file, " New best split point!\n");
724 if (best_split_point.ssa_names_to_pass)
726 BITMAP_FREE (best_split_point.ssa_names_to_pass);
727 BITMAP_FREE (best_split_point.split_bbs);
729 best_split_point = *current;
730 best_split_point.ssa_names_to_pass = BITMAP_ALLOC (NULL);
731 bitmap_copy (best_split_point.ssa_names_to_pass,
732 current->ssa_names_to_pass);
733 best_split_point.split_bbs = BITMAP_ALLOC (NULL);
734 bitmap_copy (best_split_point.split_bbs, current->split_bbs);
738 /* Return the basic block containing the RETURN statement.  We allow basic blocks
739 of the form:
740 <retval> = tmp_var;
741 return <retval>
742    but return_bb cannot be more complex than this (except that for
743    -fsanitize=thread we also allow a TSAN_FUNC_EXIT () internal call in there).
744 If nothing is found, return the exit block.
746    When there are multiple RETURN statements, choose the one with a return value,
747 since that one is more likely shared by multiple code paths.
749    The return BB is special, because for function splitting it is the only
750    basic block that is duplicated between the header and the split part of the
751 function.
753 TODO: We might support multiple return blocks. */
755 static basic_block
756 find_return_bb (void)
758 edge e;
759 basic_block return_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
760 gimple_stmt_iterator bsi;
761 bool found_return = false;
762 tree retval = NULL_TREE;
764 if (!single_pred_p (EXIT_BLOCK_PTR_FOR_FN (cfun)))
765 return return_bb;
767 e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun));
768 for (bsi = gsi_last_bb (e->src); !gsi_end_p (bsi); gsi_prev (&bsi))
770 gimple *stmt = gsi_stmt (bsi);
771 if (gimple_code (stmt) == GIMPLE_LABEL
772 || is_gimple_debug (stmt)
773 || gimple_clobber_p (stmt))
775 else if (gimple_code (stmt) == GIMPLE_ASSIGN
776 && found_return
777 && gimple_assign_single_p (stmt)
778 && (auto_var_in_fn_p (gimple_assign_rhs1 (stmt),
779 current_function_decl)
780 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
781 && retval == gimple_assign_lhs (stmt))
783 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
785 found_return = true;
786 retval = gimple_return_retval (return_stmt);
788 /* For -fsanitize=thread, allow also TSAN_FUNC_EXIT () in the return
789 bb. */
790 else if ((flag_sanitize & SANITIZE_THREAD)
791 && is_gimple_call (stmt)
792 && gimple_call_internal_p (stmt)
793 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
795 else
796 break;
798 if (gsi_end_p (bsi) && found_return)
799 return_bb = e->src;
801 return return_bb;
804 /* Given return basic block RETURN_BB, see where return value is really
805 stored. */
806 static tree
807 find_retval (basic_block return_bb)
809 gimple_stmt_iterator bsi;
810 for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
811 if (greturn *return_stmt = dyn_cast <greturn *> (gsi_stmt (bsi)))
812 return gimple_return_retval (return_stmt);
813 else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
814 && !gimple_clobber_p (gsi_stmt (bsi)))
815 return gimple_assign_rhs1 (gsi_stmt (bsi));
816 return NULL;
819 /* Given return basic block RETURN_BB, see where return bounds are really
820 stored. */
821 static tree
822 find_retbnd (basic_block return_bb)
824 gimple_stmt_iterator bsi;
825 for (bsi = gsi_last_bb (return_bb); !gsi_end_p (bsi); gsi_prev (&bsi))
826 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
827 return gimple_return_retbnd (gsi_stmt (bsi));
828 return NULL;
831 /* Callback for walk_stmt_load_store_addr_ops.  If T is a non-SSA automatic
832    variable, mark it as used in the bitmap passed via DATA.
833 Return true when access to T prevents splitting the function. */
835 static bool
836 mark_nonssa_use (gimple *, tree t, tree, void *data)
838 t = get_base_address (t);
840 if (!t || is_gimple_reg (t))
841 return false;
843 /* At present we can't pass non-SSA arguments to split function.
844 FIXME: this can be relaxed by passing references to arguments. */
845 if (TREE_CODE (t) == PARM_DECL)
847 if (dump_file && (dump_flags & TDF_DETAILS))
848 fprintf (dump_file,
849 "Cannot split: use of non-ssa function parameter.\n");
850 return true;
853 if ((TREE_CODE (t) == VAR_DECL
854 && auto_var_in_fn_p (t, current_function_decl))
855 || TREE_CODE (t) == RESULT_DECL
856 || (TREE_CODE (t) == LABEL_DECL
857 && FORCED_LABEL (t)))
858 bitmap_set_bit ((bitmap)data, DECL_UID (t));
860 /* For DECL_BY_REFERENCE, the return value is actually a pointer. We want
861      to pretend that the value pointed to is the actual result decl.  */
862 if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
863 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
864 && SSA_NAME_VAR (TREE_OPERAND (t, 0))
865 && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
866 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
867 return
868 bitmap_bit_p ((bitmap)data,
869 DECL_UID (DECL_RESULT (current_function_decl)));
871 return false;
874 /* Compute local properties of basic block BB that we collect when looking for
875    split points.  We look for SSA defs and store them in SET_SSA_NAMES,
876    for SSA uses and store them in USED_SSA_NAMES and for any non-SSA automatic
877    vars used, which we store in NON_SSA_VARS.
879 When BB has edge to RETURN_BB, collect uses in RETURN_BB too.
881 Return false when BB contains something that prevents it from being put into
882    the split function.  */
884 static bool
885 visit_bb (basic_block bb, basic_block return_bb,
886 bitmap set_ssa_names, bitmap used_ssa_names,
887 bitmap non_ssa_vars)
889 edge e;
890 edge_iterator ei;
891 bool can_split = true;
893 for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
894 gsi_next (&bsi))
896 gimple *stmt = gsi_stmt (bsi);
897 tree op;
898 ssa_op_iter iter;
899 tree decl;
901 if (is_gimple_debug (stmt))
902 continue;
904 if (gimple_clobber_p (stmt))
905 continue;
907       /* FIXME: We can split regions containing EH.  We cannot however
908 	 split RESX, EH_DISPATCH and EH_POINTER referring to the same region
909 	 into different partitions.  This would require tracking of
910 	 EH regions and checking in consider_split_point that they
911 	 are not used elsewhere.  */
912 if (gimple_code (stmt) == GIMPLE_RESX)
914 if (dump_file && (dump_flags & TDF_DETAILS))
915 fprintf (dump_file, "Cannot split: resx.\n");
916 can_split = false;
918 if (gimple_code (stmt) == GIMPLE_EH_DISPATCH)
920 if (dump_file && (dump_flags & TDF_DETAILS))
921 fprintf (dump_file, "Cannot split: eh dispatch.\n");
922 can_split = false;
925 /* Check builtins that prevent splitting. */
926 if (gimple_code (stmt) == GIMPLE_CALL
927 && (decl = gimple_call_fndecl (stmt)) != NULL_TREE
928 && DECL_BUILT_IN (decl)
929 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
930 switch (DECL_FUNCTION_CODE (decl))
932 	  /* FIXME: once we allow passing non-parm values to the split part,
933 	     we need to be sure to handle builtin_stack_save and
934 	     builtin_stack_restore correctly.  At the moment we are safe; there is no
935 	     way to store the builtin_stack_save result in a non-SSA variable
936 	     since all calls to those are compiler generated.  */
937 case BUILT_IN_APPLY:
938 case BUILT_IN_APPLY_ARGS:
939 case BUILT_IN_VA_START:
940 if (dump_file && (dump_flags & TDF_DETAILS))
941 fprintf (dump_file,
942 "Cannot split: builtin_apply and va_start.\n");
943 can_split = false;
944 break;
945 case BUILT_IN_EH_POINTER:
946 if (dump_file && (dump_flags & TDF_DETAILS))
947 fprintf (dump_file, "Cannot split: builtin_eh_pointer.\n");
948 can_split = false;
949 break;
950 default:
951 break;
954 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
955 bitmap_set_bit (set_ssa_names, SSA_NAME_VERSION (op));
956 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
957 bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
958 can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
959 mark_nonssa_use,
960 mark_nonssa_use,
961 mark_nonssa_use);
963 for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
964 gsi_next (&bsi))
966 gphi *stmt = bsi.phi ();
967 unsigned int i;
969 if (virtual_operand_p (gimple_phi_result (stmt)))
970 continue;
971 bitmap_set_bit (set_ssa_names,
972 SSA_NAME_VERSION (gimple_phi_result (stmt)));
973 for (i = 0; i < gimple_phi_num_args (stmt); i++)
975 tree op = gimple_phi_arg_def (stmt, i);
976 if (TREE_CODE (op) == SSA_NAME)
977 bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
979 can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
980 mark_nonssa_use,
981 mark_nonssa_use,
982 mark_nonssa_use);
984 /* Record also uses coming from PHI operand in return BB. */
985 FOR_EACH_EDGE (e, ei, bb->succs)
986 if (e->dest == return_bb)
988 for (gphi_iterator bsi = gsi_start_phis (return_bb);
989 !gsi_end_p (bsi);
990 gsi_next (&bsi))
992 gphi *stmt = bsi.phi ();
993 tree op = gimple_phi_arg_def (stmt, e->dest_idx);
995 if (virtual_operand_p (gimple_phi_result (stmt)))
996 continue;
997 if (TREE_CODE (op) == SSA_NAME)
998 bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
999 else
1000 can_split &= !mark_nonssa_use (stmt, op, op, non_ssa_vars);
1003 return can_split;
1006 /* Stack entry for the recursive DFS walk in find_split_points.  */
1008 struct stack_entry
1010 /* Basic block we are examining. */
1011 basic_block bb;
1013 /* SSA names set and used by the BB and all BBs reachable
1014 from it via DFS walk. */
1015 bitmap set_ssa_names, used_ssa_names;
1016 bitmap non_ssa_vars;
1018 /* All BBS visited from this BB via DFS walk. */
1019 bitmap bbs_visited;
1021   /* Last examined edge in the DFS walk.  Since we walk an unoriented graph,
1022      the value can be up to the sum of the incoming and outgoing edges of BB.  */
1023 unsigned int edge_num;
1025 /* Stack entry index of earliest BB reachable from current BB
1026 or any BB visited later in DFS walk. */
1027 int earliest;
1029 /* Overall time and size of all BBs reached from this BB in DFS walk. */
1030 int overall_time, overall_size;
1032   /* When false we cannot split on this BB.  */
1033 bool can_split;
1037 /* Find all articulations and call consider_split on them.
1038    OVERALL_TIME and OVERALL_SIZE are the time and size of the function.
1040    We perform the basic algorithm for finding articulations in a graph
1041    created from the CFG by considering it to be an unoriented graph.
1043    The articulations are discovered via a DFS walk.  We collect the earliest
1044    basic block on the stack that is reachable via a backward edge.  An articulation
1045    is any basic block such that there is no backward edge bypassing it.
1046    To reduce stack usage we maintain a heap allocated stack in the STACK vector.
1047    The AUX pointer of a BB is set to the index at which it appears in the stack, or -1 once
1048    it is visited and popped off the stack.
1050    The algorithm finds an articulation after visiting the whole component
1051    reachable from it.  This makes it convenient to collect information about
1052    the component used by consider_split.  (See the illustrative example below.)  */
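/* As a small invented illustration: for a diamond-shaped CFG

	ENTRY -> A;  A -> B;  A -> C;  B -> D;  C -> D;  D -> EXIT

   the blocks A and D satisfy this criterion (no backward edge in the
   unoriented graph bypasses them), while B and C do not, since the other
   arm of the diamond reaches D without passing through them.  The DFS
   therefore ends up calling consider_split with D (split part {D}) and
   later with A (split part {B, C, D}) as candidate entry blocks.  */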
1054 static void
1055 find_split_points (basic_block return_bb, int overall_time, int overall_size)
1057 stack_entry first;
1058 vec<stack_entry> stack = vNULL;
1059 basic_block bb;
1060 struct split_point current;
1062 current.header_time = overall_time;
1063 current.header_size = overall_size;
1064 current.split_time = 0;
1065 current.split_size = 0;
1066 current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
1068 first.bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1069 first.edge_num = 0;
1070 first.overall_time = 0;
1071 first.overall_size = 0;
1072 first.earliest = INT_MAX;
1073 first.set_ssa_names = 0;
1074 first.used_ssa_names = 0;
1075 first.non_ssa_vars = 0;
1076 first.bbs_visited = 0;
1077 first.can_split = false;
1078 stack.safe_push (first);
1079 ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = (void *)(intptr_t)-1;
1081 while (!stack.is_empty ())
1083 stack_entry *entry = &stack.last ();
1085       /* We are walking an unoriented graph, so edge_num counts
1086 	 succ and pred edges together.  However when considering
1087 	 articulations, we want to have processed everything reachable
1088 	 from the articulation but nothing that reaches into it.  */
1089 if (entry->edge_num == EDGE_COUNT (entry->bb->succs)
1090 && entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1092 int pos = stack.length ();
1093 entry->can_split &= visit_bb (entry->bb, return_bb,
1094 entry->set_ssa_names,
1095 entry->used_ssa_names,
1096 entry->non_ssa_vars);
1097 if (pos <= entry->earliest && !entry->can_split
1098 && dump_file && (dump_flags & TDF_DETAILS))
1099 fprintf (dump_file,
1100 "found articulation at bb %i but can not split\n",
1101 entry->bb->index);
1102 if (pos <= entry->earliest && entry->can_split)
1104 if (dump_file && (dump_flags & TDF_DETAILS))
1105 fprintf (dump_file, "found articulation at bb %i\n",
1106 entry->bb->index);
1107 current.entry_bb = entry->bb;
1108 current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
1109 bitmap_and_compl (current.ssa_names_to_pass,
1110 entry->used_ssa_names, entry->set_ssa_names);
1111 current.header_time = overall_time - entry->overall_time;
1112 current.header_size = overall_size - entry->overall_size;
1113 current.split_time = entry->overall_time;
1114 current.split_size = entry->overall_size;
1115 current.split_bbs = entry->bbs_visited;
1116 consider_split (&current, entry->non_ssa_vars, return_bb);
1117 BITMAP_FREE (current.ssa_names_to_pass);
1120 /* Do actual DFS walk. */
1121 if (entry->edge_num
1122 < (EDGE_COUNT (entry->bb->succs)
1123 + EDGE_COUNT (entry->bb->preds)))
1125 edge e;
1126 basic_block dest;
1127 if (entry->edge_num < EDGE_COUNT (entry->bb->succs))
1129 e = EDGE_SUCC (entry->bb, entry->edge_num);
1130 dest = e->dest;
1132 else
1134 e = EDGE_PRED (entry->bb, entry->edge_num
1135 - EDGE_COUNT (entry->bb->succs));
1136 dest = e->src;
1139 entry->edge_num++;
1141 /* New BB to visit, push it to the stack. */
1142 if (dest != return_bb && dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1143 && !dest->aux)
1145 stack_entry new_entry;
1147 new_entry.bb = dest;
1148 new_entry.edge_num = 0;
1149 new_entry.overall_time
1150 = bb_info_vec[dest->index].time;
1151 new_entry.overall_size
1152 = bb_info_vec[dest->index].size;
1153 new_entry.earliest = INT_MAX;
1154 new_entry.set_ssa_names = BITMAP_ALLOC (NULL);
1155 new_entry.used_ssa_names = BITMAP_ALLOC (NULL);
1156 new_entry.bbs_visited = BITMAP_ALLOC (NULL);
1157 new_entry.non_ssa_vars = BITMAP_ALLOC (NULL);
1158 new_entry.can_split = true;
1159 bitmap_set_bit (new_entry.bbs_visited, dest->index);
1160 stack.safe_push (new_entry);
1161 dest->aux = (void *)(intptr_t)stack.length ();
1163 /* Back edge found, record the earliest point. */
1164 else if ((intptr_t)dest->aux > 0
1165 && (intptr_t)dest->aux < entry->earliest)
1166 entry->earliest = (intptr_t)dest->aux;
1168       /* We are done with examining the edges.  Pop the value off the stack
1169 	 and merge the stuff we accumulated during the walk.  */
1170 else if (entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1172 stack_entry *prev = &stack[stack.length () - 2];
1174 entry->bb->aux = (void *)(intptr_t)-1;
1175 prev->can_split &= entry->can_split;
1176 if (prev->set_ssa_names)
1178 bitmap_ior_into (prev->set_ssa_names, entry->set_ssa_names);
1179 bitmap_ior_into (prev->used_ssa_names, entry->used_ssa_names);
1180 bitmap_ior_into (prev->bbs_visited, entry->bbs_visited);
1181 bitmap_ior_into (prev->non_ssa_vars, entry->non_ssa_vars);
1183 if (prev->earliest > entry->earliest)
1184 prev->earliest = entry->earliest;
1185 prev->overall_time += entry->overall_time;
1186 prev->overall_size += entry->overall_size;
1187 BITMAP_FREE (entry->set_ssa_names);
1188 BITMAP_FREE (entry->used_ssa_names);
1189 BITMAP_FREE (entry->bbs_visited);
1190 BITMAP_FREE (entry->non_ssa_vars);
1191 stack.pop ();
1193 else
1194 stack.pop ();
1196 ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = NULL;
1197 FOR_EACH_BB_FN (bb, cfun)
1198 bb->aux = NULL;
1199 stack.release ();
1200 BITMAP_FREE (current.ssa_names_to_pass);
1203 /* Split function at SPLIT_POINT. */
1205 static void
1206 split_function (basic_block return_bb, struct split_point *split_point,
1207 bool add_tsan_func_exit)
1209 vec<tree> args_to_pass = vNULL;
1210 bitmap args_to_skip;
1211 tree parm;
1212 int num = 0;
1213 cgraph_node *node, *cur_node = cgraph_node::get (current_function_decl);
1214 basic_block call_bb;
1215 gcall *call, *tsan_func_exit_call = NULL;
1216 edge e;
1217 edge_iterator ei;
1218 tree retval = NULL, real_retval = NULL, retbnd = NULL;
1219 bool with_bounds = chkp_function_instrumented_p (current_function_decl);
1220 gimple *last_stmt = NULL;
1221 unsigned int i;
1222 tree arg, ddef;
1224 if (dump_file)
1226 fprintf (dump_file, "\n\nSplitting function at:\n");
1227 dump_split_point (dump_file, split_point);
1230 if (cur_node->local.can_change_signature)
1231 args_to_skip = BITMAP_ALLOC (NULL);
1232 else
1233 args_to_skip = NULL;
1235 /* Collect the parameters of new function and args_to_skip bitmap. */
1236 for (parm = DECL_ARGUMENTS (current_function_decl);
1237 parm; parm = DECL_CHAIN (parm), num++)
1238 if (args_to_skip
1239 && (!is_gimple_reg (parm)
1240 || (ddef = ssa_default_def (cfun, parm)) == NULL_TREE
1241 || !bitmap_bit_p (split_point->ssa_names_to_pass,
1242 SSA_NAME_VERSION (ddef))))
1243 bitmap_set_bit (args_to_skip, num);
1244 else
1246 /* This parm might not have been used up to now, but is going to be
1247 used, hence register it. */
1248 if (is_gimple_reg (parm))
1249 arg = get_or_create_ssa_default_def (cfun, parm);
1250 else
1251 arg = parm;
1253 if (!useless_type_conversion_p (DECL_ARG_TYPE (parm), TREE_TYPE (arg)))
1254 arg = fold_convert (DECL_ARG_TYPE (parm), arg);
1255 args_to_pass.safe_push (arg);
1258 /* See if the split function will return. */
1259 bool split_part_return_p = false;
1260 FOR_EACH_EDGE (e, ei, return_bb->preds)
1262 if (bitmap_bit_p (split_point->split_bbs, e->src->index))
1263 split_part_return_p = true;
1266 /* Add return block to what will become the split function.
1267 We do not return; no return block is needed. */
1268 if (!split_part_return_p)
1270 /* We have no return block, so nothing is needed. */
1271 else if (return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1273   /* When we do not want to return a value, we need to construct
1274      a new return block with an empty return statement.
1275      FIXME: Once we are able to change the return type, we should change the function
1276      to return void instead of just outputting a function with an undefined return
1277      value.  For structures this affects the quality of codegen.  */
1278 else if ((retval = find_retval (return_bb))
1279 && !split_point->split_part_set_retval)
1281 bool redirected = true;
1282 basic_block new_return_bb = create_basic_block (NULL, 0, return_bb);
1283 gimple_stmt_iterator gsi = gsi_start_bb (new_return_bb);
1284 gsi_insert_after (&gsi, gimple_build_return (NULL), GSI_NEW_STMT);
1285 while (redirected)
1287 redirected = false;
1288 FOR_EACH_EDGE (e, ei, return_bb->preds)
1289 if (bitmap_bit_p (split_point->split_bbs, e->src->index))
1291 new_return_bb->count += e->count;
1292 new_return_bb->frequency += EDGE_FREQUENCY (e);
1293 redirect_edge_and_branch (e, new_return_bb);
1294 redirected = true;
1295 break;
1298 e = make_edge (new_return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1299 e->probability = REG_BR_PROB_BASE;
1300 e->count = new_return_bb->count;
1301 add_bb_to_loop (new_return_bb, current_loops->tree_root);
1302 bitmap_set_bit (split_point->split_bbs, new_return_bb->index);
1303 retbnd = find_retbnd (return_bb);
1305 /* When we pass around the value, use existing return block. */
1306 else
1308 bitmap_set_bit (split_point->split_bbs, return_bb->index);
1309 retbnd = find_retbnd (return_bb);
1312 /* If RETURN_BB has virtual operand PHIs, they must be removed and the
1313 virtual operand marked for renaming as we change the CFG in a way that
1314 tree-inline is not able to compensate for.
1316 Note this can happen whether or not we have a return value. If we have
1317 a return value, then RETURN_BB may have PHIs for real operands too. */
1318 if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
1320 bool phi_p = false;
1321 for (gphi_iterator gsi = gsi_start_phis (return_bb);
1322 !gsi_end_p (gsi);)
1324 gphi *stmt = gsi.phi ();
1325 if (!virtual_operand_p (gimple_phi_result (stmt)))
1327 gsi_next (&gsi);
1328 continue;
1330 mark_virtual_phi_result_for_renaming (stmt);
1331 remove_phi_node (&gsi, true);
1332 phi_p = true;
1334 /* In reality we have to rename the reaching definition of the
1335 virtual operand at return_bb as we will eventually release it
1336 when we remove the code region we outlined.
1337 So we have to rename all immediate virtual uses of that region
1338 if we didn't see a PHI definition yet. */
1339 /* ??? In real reality we want to set the reaching vdef of the
1340 entry of the SESE region as the vuse of the call and the reaching
1341 vdef of the exit of the SESE region as the vdef of the call. */
1342 if (!phi_p)
1343 for (gimple_stmt_iterator gsi = gsi_start_bb (return_bb);
1344 !gsi_end_p (gsi);
1345 gsi_next (&gsi))
1347 gimple *stmt = gsi_stmt (gsi);
1348 if (gimple_vuse (stmt))
1350 gimple_set_vuse (stmt, NULL_TREE);
1351 update_stmt (stmt);
1353 if (gimple_vdef (stmt))
1354 break;
1358 /* Now create the actual clone. */
1359 cgraph_edge::rebuild_edges ();
1360 node = cur_node->create_version_clone_with_body
1361 (vNULL, NULL, args_to_skip,
1362 !split_part_return_p || !split_point->split_part_set_retval,
1363 split_point->split_bbs, split_point->entry_bb, "part");
1365 node->split_part = true;
1367   /* Let's take a time profile for the split function.  */
1368 node->tp_first_run = cur_node->tp_first_run + 1;
1370   /* For usual cloning it is enough to clear the builtin flag only when the signature
1371      changes.  For partial inlining, however, we cannot expect the part
1372      of a builtin's implementation to have the same semantics as the whole.  */
1373 if (DECL_BUILT_IN (node->decl))
1375 DECL_BUILT_IN_CLASS (node->decl) = NOT_BUILT_IN;
1376 DECL_FUNCTION_CODE (node->decl) = (enum built_in_function) 0;
1379 /* If return_bb contains any clobbers that refer to SSA_NAMEs
1380 set in the split part, remove them. Also reset debug stmts that
1381 refer to SSA_NAMEs set in the split part. */
1382 if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
1384 gimple_stmt_iterator gsi = gsi_start_bb (return_bb);
1385 while (!gsi_end_p (gsi))
1387 tree op;
1388 ssa_op_iter iter;
1389 gimple *stmt = gsi_stmt (gsi);
1390 bool remove = false;
1391 if (gimple_clobber_p (stmt) || is_gimple_debug (stmt))
1392 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
1394 basic_block bb = gimple_bb (SSA_NAME_DEF_STMT (op));
1395 if (op != retval
1396 && bb
1397 && bb != return_bb
1398 && bitmap_bit_p (split_point->split_bbs, bb->index))
1400 if (is_gimple_debug (stmt))
1402 gimple_debug_bind_reset_value (stmt);
1403 update_stmt (stmt);
1405 else
1406 remove = true;
1407 break;
1410 if (remove)
1411 gsi_remove (&gsi, true);
1412 else
1413 gsi_next (&gsi);
1417   /* If the original function is instrumented then its
1418      split part is also instrumented.  */
1419 if (with_bounds)
1420 chkp_function_mark_instrumented (node->decl);
1422 /* If the original function is declared inline, there is no point in issuing
1423 a warning for the non-inlinable part. */
1424 DECL_NO_INLINE_WARNING_P (node->decl) = 1;
1425 cur_node->remove_callees ();
1426 cur_node->remove_all_references ();
1427 if (!split_part_return_p)
1428 TREE_THIS_VOLATILE (node->decl) = 1;
1429 if (dump_file)
1430 dump_function_to_file (node->decl, dump_file, dump_flags);
1432   /* Create the basic block we place the call into.  It is the entry basic block
1433      split after the last label.  */
1434 call_bb = split_point->entry_bb;
1435 for (gimple_stmt_iterator gsi = gsi_start_bb (call_bb); !gsi_end_p (gsi);)
1436 if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
1438 last_stmt = gsi_stmt (gsi);
1439 gsi_next (&gsi);
1441 else
1442 break;
1443 e = split_block (split_point->entry_bb, last_stmt);
1444 remove_edge (e);
1446 /* Produce the call statement. */
1447 gimple_stmt_iterator gsi = gsi_last_bb (call_bb);
1448 FOR_EACH_VEC_ELT (args_to_pass, i, arg)
1449 if (!is_gimple_val (arg))
1451 arg = force_gimple_operand_gsi (&gsi, arg, true, NULL_TREE,
1452 false, GSI_CONTINUE_LINKING);
1453 args_to_pass[i] = arg;
1455 call = gimple_build_call_vec (node->decl, args_to_pass);
1456 gimple_call_set_with_bounds (call, with_bounds);
1457 gimple_set_block (call, DECL_INITIAL (current_function_decl));
1458 args_to_pass.release ();
1460 /* For optimized away parameters, add on the caller side
1461 before the call
1462 DEBUG D#X => parm_Y(D)
1463 stmts and associate D#X with parm in decl_debug_args_lookup
1464 vector to say for debug info that if parameter parm had been passed,
1465 it would have value parm_Y(D). */
1466 if (args_to_skip)
1468 vec<tree, va_gc> **debug_args = NULL;
1469 unsigned i = 0, len = 0;
1470 if (MAY_HAVE_DEBUG_STMTS)
1472 debug_args = decl_debug_args_lookup (node->decl);
1473 if (debug_args)
1474 len = vec_safe_length (*debug_args);
1476 for (parm = DECL_ARGUMENTS (current_function_decl), num = 0;
1477 parm; parm = DECL_CHAIN (parm), num++)
1478 if (bitmap_bit_p (args_to_skip, num) && is_gimple_reg (parm))
1480 tree ddecl;
1481 gimple *def_temp;
1483 /* This needs to be done even without MAY_HAVE_DEBUG_STMTS,
1484 otherwise if it didn't exist before, we'd end up with
1485 different SSA_NAME_VERSIONs between -g and -g0. */
1486 arg = get_or_create_ssa_default_def (cfun, parm);
1487 if (!MAY_HAVE_DEBUG_STMTS || debug_args == NULL)
1488 continue;
1490 while (i < len && (**debug_args)[i] != DECL_ORIGIN (parm))
1491 i += 2;
1492 if (i >= len)
1493 continue;
1494 ddecl = (**debug_args)[i + 1];
1495 def_temp
1496 = gimple_build_debug_bind (ddecl, unshare_expr (arg), call);
1497 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
1501   /* We avoid the address being taken on any variable used by the split part,
1502      so the return slot optimization is always possible.  Moreover this is
1503      required to make DECL_BY_REFERENCE work.  */
1504 if (aggregate_value_p (DECL_RESULT (current_function_decl),
1505 TREE_TYPE (current_function_decl))
1506 && (!is_gimple_reg_type (TREE_TYPE (DECL_RESULT (current_function_decl)))
1507 || DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))))
1508 gimple_call_set_return_slot_opt (call, true);
1510 if (add_tsan_func_exit)
1511 tsan_func_exit_call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
1513   /* Update the return value.  This is a bit tricky.  When we do not return,
1514      do nothing.  When we do return, we might need to update return_bb
1515      or produce a new return statement.  */
1516 if (!split_part_return_p)
1518 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1519 if (tsan_func_exit_call)
1520 gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
1522 else
1524 e = make_edge (call_bb, return_bb,
1525 return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
1526 ? 0 : EDGE_FALLTHRU);
1527 e->count = call_bb->count;
1528 e->probability = REG_BR_PROB_BASE;
1530       /* If there is a return basic block, see what value we need to store the
1531          return value into and put the call just before it.  */
1532 if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
1534 real_retval = retval;
1535 if (real_retval && split_point->split_part_set_retval)
1537 gphi_iterator psi;
1539 	  /* See if we need a new SSA_NAME for the result.
1540 	     When DECL_BY_REFERENCE is true, retval is actually a pointer to the
1541 	     return value and it is constant in the whole function.  */
1542 if (TREE_CODE (retval) == SSA_NAME
1543 && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
1545 retval = copy_ssa_name (retval, call);
1547 /* See if there is PHI defining return value. */
1548 for (psi = gsi_start_phis (return_bb);
1549 !gsi_end_p (psi); gsi_next (&psi))
1550 if (!virtual_operand_p (gimple_phi_result (psi.phi ())))
1551 break;
1553 /* When there is PHI, just update its value. */
1554 if (TREE_CODE (retval) == SSA_NAME
1555 && !gsi_end_p (psi))
1556 add_phi_arg (psi.phi (), retval, e, UNKNOWN_LOCATION);
1557 /* Otherwise update the return BB itself.
1558 find_return_bb allows at most one assignment to return value,
1559 so update first statement. */
1560 else
1562 gimple_stmt_iterator bsi;
1563 for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
1564 gsi_next (&bsi))
1565 if (greturn *return_stmt
1566 = dyn_cast <greturn *> (gsi_stmt (bsi)))
1568 gimple_return_set_retval (return_stmt, retval);
1569 break;
1571 else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
1572 && !gimple_clobber_p (gsi_stmt (bsi)))
1574 gimple_assign_set_rhs1 (gsi_stmt (bsi), retval);
1575 break;
1577 update_stmt (gsi_stmt (bsi));
1578 /* Also adjust clobbers and debug stmts in return_bb. */
1579 for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
1580 gsi_next (&bsi))
1582 gimple *stmt = gsi_stmt (bsi);
1583 if (gimple_clobber_p (stmt)
1584 || is_gimple_debug (stmt))
1586 ssa_op_iter iter;
1587 use_operand_p use_p;
1588 bool update = false;
1589 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
1590 SSA_OP_USE)
1591 if (USE_FROM_PTR (use_p) == real_retval)
1593 SET_USE (use_p, retval);
1594 update = true;
1596 if (update)
1597 update_stmt (stmt);
1602 /* Replace retbnd with new one. */
1603 if (retbnd)
1605 gimple_stmt_iterator bsi;
1606 for (bsi = gsi_last_bb (return_bb); !gsi_end_p (bsi);
1607 gsi_prev (&bsi))
1608 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
1610 retbnd = copy_ssa_name (retbnd, call);
1611 gimple_return_set_retbnd (gsi_stmt (bsi), retbnd);
1612 update_stmt (gsi_stmt (bsi));
1613 break;
1617 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
1619 gimple_call_set_lhs (call, build_simple_mem_ref (retval));
1620 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1622 else
1624 tree restype;
1625 restype = TREE_TYPE (DECL_RESULT (current_function_decl));
1626 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1627 if (!useless_type_conversion_p (TREE_TYPE (retval), restype))
1629 gimple *cpy;
1630 tree tem = create_tmp_reg (restype);
1631 tem = make_ssa_name (tem, call);
1632 cpy = gimple_build_assign (retval, NOP_EXPR, tem);
1633 gsi_insert_after (&gsi, cpy, GSI_NEW_STMT);
1634 retval = tem;
1636 /* Build bndret call to obtain returned bounds. */
1637 if (retbnd)
1638 chkp_insert_retbnd_call (retbnd, retval, &gsi);
1639 gimple_call_set_lhs (call, retval);
1640 update_stmt (call);
1643 else
1644 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1645 if (tsan_func_exit_call)
1646 gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
1648       /* We don't use the return block (there is either no return in the function
1649 	 or multiple of them).  So create a new basic block with a return statement.  */
1651 else
1653 greturn *ret;
1654 if (split_point->split_part_set_retval
1655 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1657 retval = DECL_RESULT (current_function_decl);
1659 if (chkp_function_instrumented_p (current_function_decl)
1660 && BOUNDED_P (retval))
1661 retbnd = create_tmp_reg (pointer_bounds_type_node);
1663 /* We use temporary register to hold value when aggregate_value_p
1664 is false. Similarly for DECL_BY_REFERENCE we must avoid extra
1665 copy. */
1666 if (!aggregate_value_p (retval, TREE_TYPE (current_function_decl))
1667 && !DECL_BY_REFERENCE (retval))
1668 retval = create_tmp_reg (TREE_TYPE (retval));
1669 if (is_gimple_reg (retval))
1671 /* When returning by reference, there is only one SSA name
1672 assigned to RESULT_DECL (that is pointer to return value).
1673 Look it up or create new one if it is missing. */
1674 if (DECL_BY_REFERENCE (retval))
1675 retval = get_or_create_ssa_default_def (cfun, retval);
1676 /* Otherwise produce new SSA name for return value. */
1677 else
1678 retval = make_ssa_name (retval, call);
1680 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
1681 gimple_call_set_lhs (call, build_simple_mem_ref (retval));
1682 else
1683 gimple_call_set_lhs (call, retval);
1684 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1686 else
1688 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1689 if (retval
1690 && is_gimple_reg_type (TREE_TYPE (retval))
1691 && !is_gimple_val (retval))
1693 gassign *g
1694 = gimple_build_assign (make_ssa_name (TREE_TYPE (retval)),
1695 retval);
1696 retval = gimple_assign_lhs (g);
1697 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1700 /* Build bndret call to obtain returned bounds. */
1701 if (retbnd)
1702 chkp_insert_retbnd_call (retbnd, retval, &gsi);
1703 if (tsan_func_exit_call)
1704 gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
1705 ret = gimple_build_return (retval);
1706 gsi_insert_after (&gsi, ret, GSI_NEW_STMT);
1709 free_dominance_info (CDI_DOMINATORS);
1710 free_dominance_info (CDI_POST_DOMINATORS);
1711 compute_inline_parameters (node, true);
1714 /* Execute function splitting pass. */
1716 static unsigned int
1717 execute_split_functions (void)
1719 gimple_stmt_iterator bsi;
1720 basic_block bb;
1721 int overall_time = 0, overall_size = 0;
1722 int todo = 0;
1723 struct cgraph_node *node = cgraph_node::get (current_function_decl);
1725 if (flags_from_decl_or_type (current_function_decl)
1726 & (ECF_NORETURN|ECF_MALLOC))
1728 if (dump_file)
1729 fprintf (dump_file, "Not splitting: noreturn/malloc function.\n");
1730 return 0;
1732 if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
1734 if (dump_file)
1735 fprintf (dump_file, "Not splitting: main function.\n");
1736 return 0;
1738 /* This can be relaxed; function might become inlinable after splitting
1739 away the uninlinable part. */
1740 if (inline_edge_summary_vec.exists ()
1741 && !inline_summaries->get (node)->inlinable)
1743 if (dump_file)
1744 fprintf (dump_file, "Not splitting: not inlinable.\n");
1745 return 0;
1747 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
1749 if (dump_file)
1750 fprintf (dump_file, "Not splitting: disregarding inline limits.\n");
1751 return 0;
1753   /* This can be relaxed; most of the versioning tests actually prevent
1754      a duplication.  */
1755 if (!tree_versionable_function_p (current_function_decl))
1757 if (dump_file)
1758 fprintf (dump_file, "Not splitting: not versionable.\n");
1759 return 0;
1761 /* FIXME: we could support this. */
1762 if (DECL_STRUCT_FUNCTION (current_function_decl)->static_chain_decl)
1764 if (dump_file)
1765 fprintf (dump_file, "Not splitting: nested function.\n");
1766 return 0;
1769   /* See if it makes sense to try to split.
1770      It makes sense to split if we inline, that is if we have direct calls to
1771      handle or direct calls are possibly going to appear as a result of indirect
1772      inlining or LTO.  Also handle -fprofile-generate as LTO to allow non-LTO
1773      training for an LTO -fprofile-use build.
1775      Note that we are not completely conservative about disqualifying functions
1776      called once.  It is possible that the caller is called more than once and
1777      then inlining would still benefit.  */
1778 if ((!node->callers
1779        /* Local functions called once will be completely inlined most of the time.  */
1780 || (!node->callers->next_caller && node->local.local))
1781 && !node->address_taken
1782 && !node->has_aliases_p ()
1783 && (!flag_lto || !node->externally_visible))
1785 if (dump_file)
1786 fprintf (dump_file, "Not splitting: not called directly "
1787 "or called once.\n");
1788 return 0;
1791 /* FIXME: We can actually split if splitting reduces call overhead. */
1792 if (!flag_inline_small_functions
1793 && !DECL_DECLARED_INLINE_P (current_function_decl))
1795 if (dump_file)
1796 fprintf (dump_file, "Not splitting: not autoinlining and function"
1797 " is not inline.\n");
1798 return 0;
1801 /* We enforce splitting after loop headers when profile info is not
1802 available. */
1803 if (profile_status_for_fn (cfun) != PROFILE_READ)
1804 mark_dfs_back_edges ();
1806 /* Initialize bitmap to track forbidden calls. */
1807 forbidden_dominators = BITMAP_ALLOC (NULL);
1808 calculate_dominance_info (CDI_DOMINATORS);
1810 /* Compute local info about basic blocks and determine function size/time. */
1811 bb_info_vec.safe_grow_cleared (last_basic_block_for_fn (cfun) + 1);
1812 memset (&best_split_point, 0, sizeof (best_split_point));
1813 basic_block return_bb = find_return_bb ();
1814 int tsan_exit_found = -1;
1815 FOR_EACH_BB_FN (bb, cfun)
1817 int time = 0;
1818 int size = 0;
1819 int freq = compute_call_stmt_bb_frequency (current_function_decl, bb);
1821 if (dump_file && (dump_flags & TDF_DETAILS))
1822 fprintf (dump_file, "Basic block %i\n", bb->index);
1824 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
1826 int this_time, this_size;
1827 gimple *stmt = gsi_stmt (bsi);
1829 this_size = estimate_num_insns (stmt, &eni_size_weights);
1830 this_time = estimate_num_insns (stmt, &eni_time_weights) * freq;
1831 size += this_size;
1832 time += this_time;
1833 check_forbidden_calls (stmt);
1835 if (dump_file && (dump_flags & TDF_DETAILS))
1837 fprintf (dump_file, " freq:%6i size:%3i time:%3i ",
1838 freq, this_size, this_time);
1839 print_gimple_stmt (dump_file, stmt, 0, 0);
1842 if ((flag_sanitize & SANITIZE_THREAD)
1843 && is_gimple_call (stmt)
1844 && gimple_call_internal_p (stmt)
1845 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1847 /* We handle TSAN_FUNC_EXIT for splitting either in the
1848 return_bb, or in its immediate predecessors. */
1849 if ((bb != return_bb && !find_edge (bb, return_bb))
1850 || (tsan_exit_found != -1
1851 && tsan_exit_found != (bb != return_bb)))
1853 if (dump_file)
1854 fprintf (dump_file, "Not splitting: TSAN_FUNC_EXIT"
1855 " in unexpected basic block.\n");
1856 BITMAP_FREE (forbidden_dominators);
1857 bb_info_vec.release ();
1858 return 0;
1860 tsan_exit_found = bb != return_bb;
1863 overall_time += time;
1864 overall_size += size;
1865 bb_info_vec[bb->index].time = time;
1866 bb_info_vec[bb->index].size = size;
1868 find_split_points (return_bb, overall_time, overall_size);
1869 if (best_split_point.split_bbs)
1871 split_function (return_bb, &best_split_point, tsan_exit_found == 1);
1872 BITMAP_FREE (best_split_point.ssa_names_to_pass);
1873 BITMAP_FREE (best_split_point.split_bbs);
1874 todo = TODO_update_ssa | TODO_cleanup_cfg;
1876 BITMAP_FREE (forbidden_dominators);
1877 bb_info_vec.release ();
1878 return todo;
1881 namespace {
1883 const pass_data pass_data_split_functions =
1885 GIMPLE_PASS, /* type */
1886 "fnsplit", /* name */
1887 OPTGROUP_NONE, /* optinfo_flags */
1888 TV_IPA_FNSPLIT, /* tv_id */
1889 PROP_cfg, /* properties_required */
1890 0, /* properties_provided */
1891 0, /* properties_destroyed */
1892 0, /* todo_flags_start */
1893 0, /* todo_flags_finish */
1896 class pass_split_functions : public gimple_opt_pass
1898 public:
1899 pass_split_functions (gcc::context *ctxt)
1900 : gimple_opt_pass (pass_data_split_functions, ctxt)
1903 /* opt_pass methods: */
1904 virtual bool gate (function *);
1905 virtual unsigned int execute (function *)
1907 return execute_split_functions ();
1910 }; // class pass_split_functions
1912 bool
1913 pass_split_functions::gate (function *)
1915   /* When doing profile feedback, we want to execute the pass after profiling
1916      is read.  So disable this one in early optimization.  */
1917 return (flag_partial_inlining
1918 && !profile_arc_flag && !flag_branch_probabilities);
1921 } // anon namespace
1923 gimple_opt_pass *
1924 make_pass_split_functions (gcc::context *ctxt)
1926 return new pass_split_functions (ctxt);
1929 /* Execute function splitting pass. */
1931 static unsigned int
1932 execute_feedback_split_functions (void)
1934 unsigned int retval = execute_split_functions ();
1935 if (retval)
1936 retval |= TODO_rebuild_cgraph_edges;
1937 return retval;
1940 namespace {
1942 const pass_data pass_data_feedback_split_functions =
1944 GIMPLE_PASS, /* type */
1945 "feedback_fnsplit", /* name */
1946 OPTGROUP_NONE, /* optinfo_flags */
1947 TV_IPA_FNSPLIT, /* tv_id */
1948 PROP_cfg, /* properties_required */
1949 0, /* properties_provided */
1950 0, /* properties_destroyed */
1951 0, /* todo_flags_start */
1952 0, /* todo_flags_finish */
1955 class pass_feedback_split_functions : public gimple_opt_pass
1957 public:
1958 pass_feedback_split_functions (gcc::context *ctxt)
1959 : gimple_opt_pass (pass_data_feedback_split_functions, ctxt)
1962 /* opt_pass methods: */
1963 virtual bool gate (function *);
1964 virtual unsigned int execute (function *)
1966 return execute_feedback_split_functions ();
1969 }; // class pass_feedback_split_functions
1971 bool
1972 pass_feedback_split_functions::gate (function *)
1974   /* We don't need to split when profiling at all, as we are producing
1975      lousy code anyway.  */
1976 return (flag_partial_inlining
1977 && flag_branch_probabilities);
1980 } // anon namespace
1982 gimple_opt_pass *
1983 make_pass_feedback_split_functions (gcc::context *ctxt)
1985 return new pass_feedback_split_functions (ctxt);