/* Function splitting pass
   Copyright (C) 2010-2018 Free Software Foundation, Inc.
   Contributed by Jan Hubicka  <jh@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* The purpose of this pass is to split function bodies to improve
   inlining.  I.e. for a function of the form:

   func (...)
     {
       if (cheap_test)
         small code;
       else
         big code;
     }

   produce:

   func.part (...)
     {
       big code;
     }

   func (...)
     {
       if (cheap_test)
         small code;
       else
         func.part (...);
     }

   When func becomes inlinable and cheap_test is often true, inlining func
   but not func.part leads to a performance improvement similar to inlining
   the original func, while the code size growth is smaller.

   The pass is organized in three stages:
   1) Collect local info about basic blocks into the BB_INFO structure and
      compute the function body's estimated size and time.
   2) Via a DFS walk, find all possible basic blocks where we can split
      and choose the best one.
   3) If a split point is found, split at the specified BB by creating a clone
      and updating the function to call it.

   The decisions about what functions to split are made in
   execute_split_functions and consider_split.

   There are several possible future improvements for this pass, including:

   1) Splitting to break up large functions
   2) Splitting to reduce stack frame usage
   3) Allow the split part of the function to use values computed in the
      header part.  The values need to be passed to the split function,
      perhaps via the same interface as for nested functions or as arguments.
   4) Support for simple rematerialization.  I.e. when the split part uses a
      value computed in the header from a function parameter in a very cheap
      way, we can just recompute it.
   5) Support splitting of nested functions.
   6) Support non-SSA arguments.
   7) There is nothing preventing us from producing multiple parts of a single
      function when needed, or from splitting the parts themselves.  */
#include "coretypes.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-fnsummary.h"
/* Per basic block info.  */
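/* A minimal sketch of the per-BB record collected below; the exact field
   set is an assumption inferred from its uses in execute_split_functions
   (an integer size and an sreal time accumulated per basic block).  */

struct split_bb_info
{
  unsigned int size;
  sreal time;
};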
static vec<split_bb_info> bb_info_vec;
/* Description of split point.  */

struct split_point
{
  /* Size of the partitions.  */
  sreal header_time, split_time;
  unsigned int header_size, split_size;

  /* SSA names that need to be passed into the split function.  */
  bitmap ssa_names_to_pass;

  /* Basic block where we split (that will become entry point of the new
     function).  */
  basic_block entry_bb;

  /* Count for entering the split part.
     This is not the count of the entry_bb, because it may be in a loop.  */
  profile_count count;

  /* Basic blocks we are splitting away.  */
  bitmap split_bbs;

  /* True when the return value is computed in the split part and thus it
     needs to be returned.  */
  bool split_part_set_retval;
};
/* Best split point found.  */

struct split_point best_split_point;

/* Set of basic blocks that are not allowed to dominate a split point.  */

static bitmap forbidden_dominators;

static tree find_retval (basic_block return_bb);
/* Callback for walk_stmt_load_store_addr_ops.  If T is a non-SSA automatic
   variable, check whether it is present in the bitmap passed via DATA.  */

static bool
test_nonssa_use (gimple *, tree t, tree, void *data)
{
  t = get_base_address (t);

  if (!t || is_gimple_reg (t))
    return false;

  if (TREE_CODE (t) == PARM_DECL
      || (VAR_P (t)
          && auto_var_in_fn_p (t, current_function_decl))
      || TREE_CODE (t) == RESULT_DECL
      /* Normal labels are part of the CFG and will be handled gracefully.
         Forced labels however can be used directly by statements and
         need to stay in one partition along with their uses.  */
      || (TREE_CODE (t) == LABEL_DECL
          && FORCED_LABEL (t)))
    return bitmap_bit_p ((bitmap)data, DECL_UID (t));

  /* For DECL_BY_REFERENCE, the return value is actually a pointer.  We want
     to pretend that the value pointed to is the actual result decl.  */
  if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
      && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
      && SSA_NAME_VAR (TREE_OPERAND (t, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
      && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    return
      bitmap_bit_p ((bitmap)data,
                    DECL_UID (DECL_RESULT (current_function_decl)));

  return false;
}
/* Dump split point CURRENT.  */

static void
dump_split_point (FILE *file, struct split_point *current)
{
  fprintf (file,
           "Split point at BB %i\n"
           "  header time: %f header size: %i\n"
           "  split time: %f split size: %i\n  bbs: ",
           current->entry_bb->index, current->header_time.to_double (),
           current->header_size, current->split_time.to_double (),
           current->split_size);
  dump_bitmap (file, current->split_bbs);
  fprintf (file, "  SSA names to pass: ");
  dump_bitmap (file, current->ssa_names_to_pass);
}
/* Look for all BBs in the header that might lead to the split part and verify
   that they are not defining any non-SSA var used by the split part.
   Parameters are the same as for consider_split.  */

static bool
verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
                     basic_block return_bb)
{
  bitmap seen = BITMAP_ALLOC (NULL);
  vec<basic_block> worklist = vNULL;
  edge e;
  edge_iterator ei;
  bool ok = true;
  basic_block bb;

  FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
    if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
        && !bitmap_bit_p (current->split_bbs, e->src->index))
      {
        worklist.safe_push (e->src);
        bitmap_set_bit (seen, e->src->index);
      }

  while (!worklist.is_empty ())
    {
      bb = worklist.pop ();
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
            && bitmap_set_bit (seen, e->src->index))
          {
            gcc_checking_assert (!bitmap_bit_p (current->split_bbs,
                                                e->src->index));
            worklist.safe_push (e->src);
          }
      for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
           gsi_next (&bsi))
        {
          gimple *stmt = gsi_stmt (bsi);
          if (is_gimple_debug (stmt))
            continue;
          if (walk_stmt_load_store_addr_ops
              (stmt, non_ssa_vars, test_nonssa_use, test_nonssa_use,
               test_nonssa_use))
            {
              ok = false;
              goto done;
            }
          if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
            if (test_nonssa_use (stmt, gimple_label_label (label_stmt),
                                 NULL_TREE, non_ssa_vars))
              {
                ok = false;
                goto done;
              }
        }
      for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
           gsi_next (&bsi))
        {
          if (walk_stmt_load_store_addr_ops
              (gsi_stmt (bsi), non_ssa_vars, test_nonssa_use, test_nonssa_use,
               test_nonssa_use))
            {
              ok = false;
              goto done;
            }
        }
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (e->dest != return_bb)
            continue;
          for (gphi_iterator bsi = gsi_start_phis (return_bb);
               !gsi_end_p (bsi);
               gsi_next (&bsi))
            {
              gphi *stmt = bsi.phi ();
              tree op = gimple_phi_arg_def (stmt, e->dest_idx);

              if (virtual_operand_p (gimple_phi_result (stmt)))
                continue;
              if (TREE_CODE (op) != SSA_NAME
                  && test_nonssa_use (stmt, op, op, non_ssa_vars))
                {
                  ok = false;
                  goto done;
                }
            }
        }
    }

  /* Verify that the rest of the function does not define any label
     used by the split part.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (!bitmap_bit_p (current->split_bbs, bb->index)
        && !bitmap_bit_p (seen, bb->index))
      {
        gimple_stmt_iterator bsi;
        for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
          if (glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (bsi)))
            if (test_nonssa_use (label_stmt,
                                 gimple_label_label (label_stmt),
                                 NULL_TREE, non_ssa_vars))
              {
                ok = false;
                goto done;
              }
      }

done:
  BITMAP_FREE (seen);
  worklist.release ();
  return ok;
}
/* If STMT is a call, check the callee against a list of forbidden
   predicate functions.  If a match is found, look for uses of the
   call result in condition statements that compare against zero.
   For each such use, find the block targeted by the condition
   statement for the nonzero result, and set the bit for this block
   in the forbidden dominators bitmap.  The purpose of this is to avoid
   selecting a split point where we are likely to lose the chance
   to optimize away an unused function call.  */
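/* An illustrative sketch (with hypothetical helpers) of the user code this
   protects: the block reached on a nonzero __builtin_constant_p result is
   recorded in forbidden_dominators, because splitting there could separate
   the predicate from the code it guards and lose the chance to fold the
   builtin and drop the call after inlining.  */

extern void example_use_constant_path (int);
extern void example_use_generic_path (int);

void
example_forbidden_pattern (int n)
{
  if (__builtin_constant_p (n))
    example_use_constant_path (n);  /* This block becomes forbidden.  */
  else
    example_use_generic_path (n);
}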
static void
check_forbidden_calls (gimple *stmt)
{
  imm_use_iterator use_iter;
  use_operand_p use_p;
  tree lhs;

  /* At the moment, __builtin_constant_p is the only forbidden
     predicate function call (see PR49642).  */
  if (!gimple_call_builtin_p (stmt, BUILT_IN_CONSTANT_P))
    return;

  lhs = gimple_call_lhs (stmt);
  if (!lhs || TREE_CODE (lhs) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_FAST (use_p, use_iter, lhs)
    {
      tree op1;
      basic_block use_bb, forbidden_bb;
      enum tree_code code;
      edge true_edge, false_edge;
      gcond *use_stmt;

      use_stmt = dyn_cast <gcond *> (USE_STMT (use_p));
      if (!use_stmt)
        continue;

      /* Assuming canonical form for GIMPLE_COND here, with constant
         in second position.  */
      op1 = gimple_cond_rhs (use_stmt);
      code = gimple_cond_code (use_stmt);
      use_bb = gimple_bb (use_stmt);

      extract_true_false_edges_from_block (use_bb, &true_edge, &false_edge);

      /* We're only interested in comparisons that distinguish
         unambiguously from zero.  */
      if (!integer_zerop (op1) || code == LE_EXPR || code == GE_EXPR)
        continue;

      if (code == EQ_EXPR)
        forbidden_bb = false_edge->dest;
      else
        forbidden_bb = true_edge->dest;

      bitmap_set_bit (forbidden_dominators, forbidden_bb->index);
    }
}
/* If BB is dominated by any block in the forbidden dominators set,
   return TRUE; else FALSE.  */

static bool
dominated_by_forbidden (basic_block bb)
{
  unsigned dom_bb;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (forbidden_dominators, 1, dom_bb, bi)
    {
      if (dominated_by_p (CDI_DOMINATORS, bb,
                          BASIC_BLOCK_FOR_FN (cfun, dom_bb)))
        return true;
    }

  return false;
}
/* For the given split point CURRENT and return block RETURN_BB, return 1
   if ssa name VAL is set by the split part and 0 otherwise.  */

static bool
split_part_set_ssa_name_p (tree val, struct split_point *current,
                           basic_block return_bb)
{
  if (TREE_CODE (val) != SSA_NAME)
    return false;

  return (!SSA_NAME_IS_DEFAULT_DEF (val)
          && (bitmap_bit_p (current->split_bbs,
                            gimple_bb (SSA_NAME_DEF_STMT (val))->index)
              || gimple_bb (SSA_NAME_DEF_STMT (val)) == return_bb));
}
/* We found a split_point CURRENT.  NON_SSA_VARS is a bitmap of all non-ssa
   variables used and RETURN_BB is the return basic block.
   See if we can split the function here.  */

static void
consider_split (struct split_point *current, bitmap non_ssa_vars,
                basic_block return_bb)
{
  tree parm;
  unsigned int num_args = 0;
  unsigned int call_overhead;
  edge e;
  edge_iterator ei;
  gphi_iterator bsi;
  unsigned int i;
  tree retval;
  bool back_edge = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_split_point (dump_file, current);

  current->count = profile_count::zero ();
  FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
    {
      if (e->flags & EDGE_DFS_BACK)
        back_edge = true;
      if (!bitmap_bit_p (current->split_bbs, e->src->index))
        current->count += e->count ();
    }

  /* Do not split when we would end up calling the function anyway.
     Compares are three state, use !(...<...) to also give up when the outcome
     is unknown.  */
  if (!(current->count
        < (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale
           (PARAM_VALUE (PARAM_PARTIAL_INLINING_ENTRY_PROBABILITY), 100))))
    {
      /* When the profile is guessed, we can not expect it to give us a
         realistic estimate of the likelihood of the function taking the
         complex path.  As a special case, when the tail of the function is
         a loop, enable splitting since inlining code skipping the loop
         is likely a noticeable win.  */
      if (back_edge
          && profile_status_for_fn (cfun) != PROFILE_READ
          && current->count
             < ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file,
                       "  Split before loop, accepting despite low counts");
              current->count.dump (dump_file);
              fprintf (dump_file, " ");
              ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.dump (dump_file);
            }
        }
      else
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file,
                     "  Refused: incoming frequency is too large.\n");
          return;
        }
    }

  if (!current->header_size)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "  Refused: header empty\n");
      return;
    }

  /* Verify that PHI args on entry are either virtual or all their operands
     incoming from the header are the same.  */
  for (bsi = gsi_start_phis (current->entry_bb); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      gphi *stmt = bsi.phi ();
      tree val = NULL;

      if (virtual_operand_p (gimple_phi_result (stmt)))
        continue;
      for (i = 0; i < gimple_phi_num_args (stmt); i++)
        {
          edge e = gimple_phi_arg_edge (stmt, i);
          if (!bitmap_bit_p (current->split_bbs, e->src->index))
            {
              tree edge_val = gimple_phi_arg_def (stmt, i);
              if (val && edge_val != val)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file,
                             "  Refused: entry BB has PHI with multiple variants\n");
                  return;
                }
              val = edge_val;
            }
        }
    }

  /* See what arguments we will pass to the split function and compute
     call overhead.  */
  call_overhead = eni_size_weights.call_cost;
  for (parm = DECL_ARGUMENTS (current_function_decl); parm;
       parm = DECL_CHAIN (parm))
    {
      if (!is_gimple_reg (parm))
        {
          if (bitmap_bit_p (non_ssa_vars, DECL_UID (parm)))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "  Refused: need to pass non-ssa param values\n");
              return;
            }
        }
      else
        {
          tree ddef = ssa_default_def (cfun, parm);
          if (ddef
              && bitmap_bit_p (current->ssa_names_to_pass,
                               SSA_NAME_VERSION (ddef)))
            {
              if (!VOID_TYPE_P (TREE_TYPE (parm)))
                call_overhead += estimate_move_cost (TREE_TYPE (parm), false);
              num_args++;
            }
        }
    }
  if (!VOID_TYPE_P (TREE_TYPE (current_function_decl)))
    call_overhead += estimate_move_cost (TREE_TYPE (current_function_decl),
                                         false);

  if (current->split_size <= call_overhead)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "  Refused: split size is smaller than call overhead\n");
      return;
    }
  /* FIXME: The logic here is not very precise, because the inliner does use
     inline predicates to reduce function body size.  We add 10 to anticipate
     that.  Next stage1 we should try to be more meaningful here.  */
  if (current->header_size + call_overhead
      >= (unsigned int)(DECL_DECLARED_INLINE_P (current_function_decl)
                        ? MAX_INLINE_INSNS_SINGLE
                        : MAX_INLINE_INSNS_AUTO) + 10)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "  Refused: header size is too large for inline candidate\n");
      return;
    }

  /* Splitting functions brings the target out of its comdat group; this will
     lead to code duplication if the function is reused by another unit.
     Limit this duplication.  This is consistent with the limit in tree-sra.c.
     FIXME: with LTO we ought to be able to do better!  */
  if (DECL_ONE_ONLY (current_function_decl)
      && current->split_size >= (unsigned int) MAX_INLINE_INSNS_AUTO + 10)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "  Refused: function is COMDAT and tail is too large\n");
      return;
    }
  /* For comdat functions also reject very small tails; those will likely get
     inlined back and we do not want to risk the duplication overhead.
     FIXME: with LTO we ought to be able to do better!  */
  if (DECL_ONE_ONLY (current_function_decl)
      && current->split_size
         <= (unsigned int) PARAM_VALUE (PARAM_EARLY_INLINING_INSNS) / 2)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "  Refused: function is COMDAT and tail is too small\n");
      return;
    }

  /* FIXME: we currently can pass only SSA function parameters to the split
     arguments.  Once parm_adjustment infrastructure is supported by cloning,
     we can pass more than that.  */
  if (num_args != bitmap_count_bits (current->ssa_names_to_pass))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "  Refused: need to pass non-param values\n");
      return;
    }

  /* When there are non-ssa vars used in the split region, see if they
     are used in the header region.  If so, reject the split.
     FIXME: we can use nested function support to access both.  */
  if (!bitmap_empty_p (non_ssa_vars)
      && !verify_non_ssa_vars (current, non_ssa_vars, return_bb))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "  Refused: split part has non-ssa uses\n");
      return;
    }

  /* If the split point is dominated by a forbidden block, reject
     the split.  */
  if (!bitmap_empty_p (forbidden_dominators)
      && dominated_by_forbidden (current->entry_bb))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "  Refused: split point dominated by forbidden block\n");
      return;
    }

  /* See if the retval used by the return bb is computed by the header or the
     split part.  When it is computed by the split part, we need to produce a
     return statement in the split part and add code to the header to pass it
     around.

     This is a bit tricky to test:
       1) When there is no return_bb or no return value, we always pass
          value around.
       2) Invariants are always computed by the caller.
       3) For SSA we need to look if the defining statement is in the header
          or the split part.
       4) For non-SSA we need to look where the var is computed.  */
  retval = find_retval (return_bb);
  if (!retval)
    {
      /* If there is a return_bb with no return value in a function returning
         its value by reference, also make the split part return void;
         otherwise the expansion would try to create a non-POD temporary,
         which is invalid.  */
      if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
          && DECL_RESULT (current_function_decl)
          && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
        current->split_part_set_retval = false;
      else
        current->split_part_set_retval = true;
    }
  else if (is_gimple_min_invariant (retval))
    current->split_part_set_retval = false;
  /* Special case is a value returned by reference we record as if it was a
     non-ssa set to result_decl.  */
  else if (TREE_CODE (retval) == SSA_NAME
           && SSA_NAME_VAR (retval)
           && TREE_CODE (SSA_NAME_VAR (retval)) == RESULT_DECL
           && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    current->split_part_set_retval
      = bitmap_bit_p (non_ssa_vars, DECL_UID (SSA_NAME_VAR (retval)));
  else if (TREE_CODE (retval) == SSA_NAME)
    current->split_part_set_retval
      = split_part_set_ssa_name_p (retval, current, return_bb);
  else if (TREE_CODE (retval) == PARM_DECL)
    current->split_part_set_retval = false;
  else if (VAR_P (retval)
           || TREE_CODE (retval) == RESULT_DECL)
    current->split_part_set_retval
      = bitmap_bit_p (non_ssa_vars, DECL_UID (retval));
  else
    current->split_part_set_retval = true;

  /* split_function fixes up at most one non-virtual PHI node in return_bb,
     for the return value.  If there are other PHIs, give up.  */
  if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      gphi_iterator psi;

      for (psi = gsi_start_phis (return_bb); !gsi_end_p (psi); gsi_next (&psi))
        if (!virtual_operand_p (gimple_phi_result (psi.phi ()))
            && !(retval
                 && current->split_part_set_retval
                 && TREE_CODE (retval) == SSA_NAME
                 && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))
                 && SSA_NAME_DEF_STMT (retval) == psi.phi ()))
          {
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file,
                       "  Refused: return bb has extra PHIs\n");
            return;
          }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "  Accepted!\n");

  /* At the moment choose the split point with the lowest count that also
     leaves out the smallest size of header.
     In the future we might re-consider this heuristic.  */
  if (!best_split_point.split_bbs
      || best_split_point.count
         > current->count
      || (best_split_point.count == current->count
          && best_split_point.split_size < current->split_size))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "  New best split point!\n");
      if (best_split_point.ssa_names_to_pass)
        {
          BITMAP_FREE (best_split_point.ssa_names_to_pass);
          BITMAP_FREE (best_split_point.split_bbs);
        }
      best_split_point = *current;
      best_split_point.ssa_names_to_pass = BITMAP_ALLOC (NULL);
      bitmap_copy (best_split_point.ssa_names_to_pass,
                   current->ssa_names_to_pass);
      best_split_point.split_bbs = BITMAP_ALLOC (NULL);
      bitmap_copy (best_split_point.split_bbs, current->split_bbs);
    }
}
/* Return the basic block containing the RETURN statement.  We allow basic
   blocks of the form:
   <retval> = tmp_var;
   return <retval>
   but return_bb can not be more complex than this (except for
   -fsanitize=thread we allow TSAN_FUNC_EXIT () internal call in there).
   If nothing is found, return the exit block.

   When there are multiple RETURN statements, choose one with a return value,
   since that one is more likely shared by multiple code paths.

   Return BB is special, because for function splitting it is the only
   basic block that is duplicated in between the header and the split part
   of the function.

   TODO: We might support multiple return blocks.  */

static basic_block
find_return_bb (void)
{
  edge e;
  basic_block return_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  gimple_stmt_iterator bsi;
  bool found_return = false;
  tree retval = NULL_TREE;

  if (!single_pred_p (EXIT_BLOCK_PTR_FOR_FN (cfun)))
    return return_bb;

  e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun));
  for (bsi = gsi_last_bb (e->src); !gsi_end_p (bsi); gsi_prev (&bsi))
    {
      gimple *stmt = gsi_stmt (bsi);
      if (gimple_code (stmt) == GIMPLE_LABEL
          || is_gimple_debug (stmt)
          || gimple_clobber_p (stmt))
        ;
      else if (gimple_code (stmt) == GIMPLE_ASSIGN
               && found_return
               && gimple_assign_single_p (stmt)
               && (auto_var_in_fn_p (gimple_assign_rhs1 (stmt),
                                     current_function_decl)
                   || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
               && retval == gimple_assign_lhs (stmt))
        ;
      else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
        {
          found_return = true;
          retval = gimple_return_retval (return_stmt);
        }
      /* For -fsanitize=thread, allow also TSAN_FUNC_EXIT () in the return
         bb.  */
      else if ((flag_sanitize & SANITIZE_THREAD)
               && gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
        ;
      else
        break;
    }
  if (gsi_end_p (bsi) && found_return)
    return_bb = e->src;

  return return_bb;
}
/* Given return basic block RETURN_BB, see where the return value is really
   stored.  */

static tree
find_retval (basic_block return_bb)
{
  gimple_stmt_iterator bsi;
  for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
    if (greturn *return_stmt = dyn_cast <greturn *> (gsi_stmt (bsi)))
      return gimple_return_retval (return_stmt);
    else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
             && !gimple_clobber_p (gsi_stmt (bsi)))
      return gimple_assign_rhs1 (gsi_stmt (bsi));
  return NULL_TREE;
}
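/* For illustration, the two return_bb shapes find_retval understands, in
   GIMPLE-like pseudo code (a sketch, not taken from an actual dump):

     return_bb:                       return_bb:
       return op_1;                     <retval> = tmp_var;
                                        return <retval>;

   In the first form the returned value is op_1; in the second it is
   tmp_var, the right-hand side of the first non-clobber assignment.  */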
/* Callback for walk_stmt_load_store_addr_ops.  If T is a non-SSA automatic
   variable, mark it as used in the bitmap passed via DATA.
   Return true when access to T prevents splitting the function.  */

static bool
mark_nonssa_use (gimple *, tree t, tree, void *data)
{
  t = get_base_address (t);

  if (!t || is_gimple_reg (t))
    return false;

  /* At present we can't pass non-SSA arguments to the split function.
     FIXME: this can be relaxed by passing references to arguments.  */
  if (TREE_CODE (t) == PARM_DECL)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "Cannot split: use of non-ssa function parameter.\n");
      return true;
    }

  if ((VAR_P (t) && auto_var_in_fn_p (t, current_function_decl))
      || TREE_CODE (t) == RESULT_DECL
      || (TREE_CODE (t) == LABEL_DECL && FORCED_LABEL (t)))
    bitmap_set_bit ((bitmap)data, DECL_UID (t));

  /* For DECL_BY_REFERENCE, the return value is actually a pointer.  We want
     to pretend that the value pointed to is the actual result decl.  */
  if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
      && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
      && SSA_NAME_VAR (TREE_OPERAND (t, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
      && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    return
      bitmap_bit_p ((bitmap)data,
                    DECL_UID (DECL_RESULT (current_function_decl)));

  return false;
}
/* Compute local properties of basic block BB that we collect when looking for
   split points.  We look for ssa defs and store them in SET_SSA_NAMES,
   for ssa uses and store them in USED_SSA_NAMES, and for any non-SSA automatic
   vars stored in NON_SSA_VARS.

   When BB has an edge to RETURN_BB, collect uses in RETURN_BB too.

   Return false when BB contains something that prevents it from being put
   into the split function.  */

static bool
visit_bb (basic_block bb, basic_block return_bb,
          bitmap set_ssa_names, bitmap used_ssa_names,
          bitmap non_ssa_vars)
{
  edge e;
  edge_iterator ei;
  bool can_split = true;

  for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
       gsi_next (&bsi))
    {
      gimple *stmt = gsi_stmt (bsi);
      tree op;
      ssa_op_iter iter;
      tree decl;

      if (is_gimple_debug (stmt))
        continue;

      if (gimple_clobber_p (stmt))
        continue;

      /* FIXME: We can split regions containing EH.  We can not however
         split RESX, EH_DISPATCH and EH_POINTER referring to the same region
         into different partitions.  This would require tracking of
         EH regions and checking in consider_split_point if they
         are not used elsewhere.  */
      if (gimple_code (stmt) == GIMPLE_RESX)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Cannot split: resx.\n");
          can_split = false;
        }
      if (gimple_code (stmt) == GIMPLE_EH_DISPATCH)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Cannot split: eh dispatch.\n");
          can_split = false;
        }

      /* Check builtins that prevent splitting.  */
      if (gimple_code (stmt) == GIMPLE_CALL
          && (decl = gimple_call_fndecl (stmt)) != NULL_TREE
          && DECL_BUILT_IN (decl)
          && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (decl))
          {
          /* FIXME: once we will allow passing non-parm values to the split
             part, we need to be sure to handle correctly builtin_stack_save
             and builtin_stack_restore.  At the moment we are safe; there is
             no way to store a builtin_stack_save result in a non-SSA
             variable since all calls to those are compiler generated.  */
          case BUILT_IN_APPLY:
          case BUILT_IN_APPLY_ARGS:
          case BUILT_IN_VA_START:
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file,
                       "Cannot split: builtin_apply and va_start.\n");
            can_split = false;
            break;
          case BUILT_IN_EH_POINTER:
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Cannot split: builtin_eh_pointer.\n");
            can_split = false;
            break;
          default:
            break;
          }

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
        bitmap_set_bit (set_ssa_names, SSA_NAME_VERSION (op));
      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
        bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
      can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
                                                   mark_nonssa_use,
                                                   mark_nonssa_use,
                                                   mark_nonssa_use);
    }
  for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
       gsi_next (&bsi))
    {
      gphi *stmt = bsi.phi ();
      unsigned int i;

      if (virtual_operand_p (gimple_phi_result (stmt)))
        continue;
      bitmap_set_bit (set_ssa_names,
                      SSA_NAME_VERSION (gimple_phi_result (stmt)));
      for (i = 0; i < gimple_phi_num_args (stmt); i++)
        {
          tree op = gimple_phi_arg_def (stmt, i);
          if (TREE_CODE (op) == SSA_NAME)
            bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
        }
      can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
                                                   mark_nonssa_use,
                                                   mark_nonssa_use,
                                                   mark_nonssa_use);
    }
  /* Record also uses coming from PHI operands in the return BB.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->dest == return_bb)
      {
        for (gphi_iterator bsi = gsi_start_phis (return_bb);
             !gsi_end_p (bsi);
             gsi_next (&bsi))
          {
            gphi *stmt = bsi.phi ();
            tree op = gimple_phi_arg_def (stmt, e->dest_idx);

            if (virtual_operand_p (gimple_phi_result (stmt)))
              continue;
            if (TREE_CODE (op) == SSA_NAME)
              bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
            else
              can_split &= !mark_nonssa_use (stmt, op, op, non_ssa_vars);
          }
      }
  return can_split;
}
/* Stack entry for recursive DFS walk in find_split_point.  */

struct stack_entry
{
  /* Basic block we are examining.  */
  basic_block bb;

  /* SSA names set and used by the BB and all BBs reachable
     from it via DFS walk.  */
  bitmap set_ssa_names, used_ssa_names;
  bitmap non_ssa_vars;

  /* All BBs visited from this BB via DFS walk.  */
  bitmap bbs_visited;

  /* Last examined edge in DFS walk.  Since we walk an unoriented graph,
     the value is up to the sum of incoming and outgoing edges of BB.  */
  unsigned int edge_num;

  /* Stack entry index of the earliest BB reachable from the current BB
     or any BB visited later in the DFS walk.  */
  int earliest;

  /* Overall time and size of all BBs reached from this BB in DFS walk.  */
  sreal overall_time;
  int overall_size;

  /* When false we can not split on this BB.  */
  bool can_split;
};
/* Find all articulations and call consider_split on them.
   OVERALL_TIME and OVERALL_SIZE are the time and size of the function.

   We perform the basic algorithm for finding an articulation in a graph
   created from the CFG by considering it to be an unoriented graph.

   The articulation is discovered via DFS walk.  We collect the earliest
   basic block on the stack that is reachable via a backward edge.  An
   articulation is any basic block such that there is no backward edge
   bypassing it.  To reduce stack usage we maintain a heap allocated stack
   in the STACK vector.  The AUX pointer of a BB is set to the index it
   appears at in the stack, or -1 once it is visited and popped off the
   stack.

   The algorithm finds an articulation after visiting the whole component
   reachable from it.  This makes it convenient to collect information about
   the component used by consider_split.  A standalone sketch of the textbook
   version of this search follows this comment.  */
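/* For reference, the textbook articulation-point search that the walk below
   mirrors, reduced to a plain adjacency-matrix graph.  This is an
   illustrative sketch only, not used by the pass; the example_ names and
   the fixed node limit are assumptions made for the example.  */

#define EXAMPLE_MAX_NODES 64

static bool example_adj[EXAMPLE_MAX_NODES][EXAMPLE_MAX_NODES];
static int example_n_nodes;
static int example_disc[EXAMPLE_MAX_NODES];
static int example_low[EXAMPLE_MAX_NODES];
static int example_timer;
static bool example_is_articulation[EXAMPLE_MAX_NODES];

static void
example_visit (int u, int parent)
{
  int children = 0;
  example_disc[u] = example_low[u] = ++example_timer;
  for (int v = 0; v < example_n_nodes; v++)
    if (example_adj[u][v])
      {
        if (!example_disc[v])
          {
            children++;
            example_visit (v, u);
            if (example_low[v] < example_low[u])
              example_low[u] = example_low[v];
            /* U is an articulation when no back edge from V's subtree
               bypasses it; find_split_points tests the equivalent
               condition as pos <= entry->earliest.  */
            if (parent != -1 && example_low[v] >= example_disc[u])
              example_is_articulation[u] = true;
          }
        else if (v != parent && example_disc[v] < example_low[u])
          example_low[u] = example_disc[v];
      }
  /* The DFS root is an articulation iff it has more than one child.  */
  if (parent == -1 && children > 1)
    example_is_articulation[u] = true;
}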
static void
find_split_points (basic_block return_bb, sreal overall_time, int overall_size)
{
  stack_entry first;
  vec<stack_entry> stack = vNULL;
  basic_block bb;
  struct split_point current;

  current.header_time = overall_time;
  current.header_size = overall_size;
  current.split_time = 0;
  current.split_size = 0;
  current.ssa_names_to_pass = BITMAP_ALLOC (NULL);

  first.bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  first.edge_num = 0;
  first.overall_time = 0;
  first.overall_size = 0;
  first.earliest = INT_MAX;
  first.set_ssa_names = 0;
  first.used_ssa_names = 0;
  first.non_ssa_vars = 0;
  first.bbs_visited = 0;
  first.can_split = false;
  stack.safe_push (first);
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = (void *)(intptr_t)-1;

  while (!stack.is_empty ())
    {
      stack_entry *entry = &stack.last ();

      /* We are walking an acyclic graph, so edge_num counts
         succ and pred edges together.  However when considering
         articulation, we want to have processed everything reachable
         from articulation but nothing that reaches into it.  */
      if (entry->edge_num == EDGE_COUNT (entry->bb->succs)
          && entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          int pos = stack.length ();
          entry->can_split &= visit_bb (entry->bb, return_bb,
                                        entry->set_ssa_names,
                                        entry->used_ssa_names,
                                        entry->non_ssa_vars);
          if (pos <= entry->earliest && !entry->can_split
              && dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file,
                     "found articulation at bb %i but can not split\n",
                     entry->bb->index);
          if (pos <= entry->earliest && entry->can_split)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "found articulation at bb %i\n",
                         entry->bb->index);
              current.entry_bb = entry->bb;
              current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
              bitmap_and_compl (current.ssa_names_to_pass,
                                entry->used_ssa_names, entry->set_ssa_names);
              current.header_time = overall_time - entry->overall_time;
              current.header_size = overall_size - entry->overall_size;
              current.split_time = entry->overall_time;
              current.split_size = entry->overall_size;
              current.split_bbs = entry->bbs_visited;
              consider_split (&current, entry->non_ssa_vars, return_bb);
              BITMAP_FREE (current.ssa_names_to_pass);
            }
        }
      /* Do actual DFS walk.  */
      if (entry->edge_num
          < (EDGE_COUNT (entry->bb->succs)
             + EDGE_COUNT (entry->bb->preds)))
        {
          edge e;
          basic_block dest;
          if (entry->edge_num < EDGE_COUNT (entry->bb->succs))
            {
              e = EDGE_SUCC (entry->bb, entry->edge_num);
              dest = e->dest;
            }
          else
            {
              e = EDGE_PRED (entry->bb, entry->edge_num
                             - EDGE_COUNT (entry->bb->succs));
              dest = e->src;
            }

          entry->edge_num++;

          /* New BB to visit, push it to the stack.  */
          if (dest != return_bb && dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
              && !dest->aux)
            {
              stack_entry new_entry;

              new_entry.bb = dest;
              new_entry.edge_num = 0;
              new_entry.overall_time
                = bb_info_vec[dest->index].time;
              new_entry.overall_size
                = bb_info_vec[dest->index].size;
              new_entry.earliest = INT_MAX;
              new_entry.set_ssa_names = BITMAP_ALLOC (NULL);
              new_entry.used_ssa_names = BITMAP_ALLOC (NULL);
              new_entry.bbs_visited = BITMAP_ALLOC (NULL);
              new_entry.non_ssa_vars = BITMAP_ALLOC (NULL);
              new_entry.can_split = true;
              bitmap_set_bit (new_entry.bbs_visited, dest->index);
              stack.safe_push (new_entry);
              dest->aux = (void *)(intptr_t)stack.length ();
            }
          /* Back edge found, record the earliest point.  */
          else if ((intptr_t)dest->aux > 0
                   && (intptr_t)dest->aux < entry->earliest)
            entry->earliest = (intptr_t)dest->aux;
        }
      /* We are done with examining the edges.  Pop off the value from stack
         and merge stuff we accumulated during the walk.  */
      else if (entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          stack_entry *prev = &stack[stack.length () - 2];

          entry->bb->aux = (void *)(intptr_t)-1;
          prev->can_split &= entry->can_split;
          if (prev->set_ssa_names)
            {
              bitmap_ior_into (prev->set_ssa_names, entry->set_ssa_names);
              bitmap_ior_into (prev->used_ssa_names, entry->used_ssa_names);
              bitmap_ior_into (prev->bbs_visited, entry->bbs_visited);
              bitmap_ior_into (prev->non_ssa_vars, entry->non_ssa_vars);
            }
          if (prev->earliest > entry->earliest)
            prev->earliest = entry->earliest;
          prev->overall_time += entry->overall_time;
          prev->overall_size += entry->overall_size;
          BITMAP_FREE (entry->set_ssa_names);
          BITMAP_FREE (entry->used_ssa_names);
          BITMAP_FREE (entry->bbs_visited);
          BITMAP_FREE (entry->non_ssa_vars);
          stack.pop ();
        }
      else
        stack.pop ();
    }

  ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = NULL;
  FOR_EACH_BB_FN (bb, cfun)
    bb->aux = NULL;
  stack.release ();
  BITMAP_FREE (current.ssa_names_to_pass);
}
/* Split function at SPLIT_POINT.  */

static void
split_function (basic_block return_bb, struct split_point *split_point,
                bool add_tsan_func_exit)
{
  vec<tree> args_to_pass = vNULL;
  bitmap args_to_skip;
  tree parm;
  int num = 0;
  cgraph_node *node, *cur_node = cgraph_node::get (current_function_decl);
  basic_block call_bb;
  gcall *call, *tsan_func_exit_call = NULL;
  edge e;
  edge_iterator ei;
  tree retval = NULL, real_retval = NULL;
  gimple *last_stmt = NULL;
  unsigned int i;
  tree arg, ddef;

  if (dump_file)
    {
      fprintf (dump_file, "\n\nSplitting function at:\n");
      dump_split_point (dump_file, split_point);
    }

  if (cur_node->local.can_change_signature)
    args_to_skip = BITMAP_ALLOC (NULL);
  else
    args_to_skip = NULL;

  /* Collect the parameters of the new function and the args_to_skip
     bitmap.  */
  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm; parm = DECL_CHAIN (parm), num++)
    if (args_to_skip
        && (!is_gimple_reg (parm)
            || (ddef = ssa_default_def (cfun, parm)) == NULL_TREE
            || !bitmap_bit_p (split_point->ssa_names_to_pass,
                              SSA_NAME_VERSION (ddef))))
      bitmap_set_bit (args_to_skip, num);
    else
      {
        /* This parm might not have been used up to now, but is going to be
           used, hence register it.  */
        if (is_gimple_reg (parm))
          arg = get_or_create_ssa_default_def (cfun, parm);
        else
          arg = parm;

        if (!useless_type_conversion_p (DECL_ARG_TYPE (parm), TREE_TYPE (arg)))
          arg = fold_convert (DECL_ARG_TYPE (parm), arg);
        args_to_pass.safe_push (arg);
      }

  /* See if the split function will return.  */
  bool split_part_return_p = false;
  FOR_EACH_EDGE (e, ei, return_bb->preds)
    {
      if (bitmap_bit_p (split_point->split_bbs, e->src->index))
        split_part_return_p = true;
    }

  /* Add the return block to what will become the split function.
     We do not return; no return block is needed.  */
  if (!split_part_return_p)
    ;
  /* We have no return block, so nothing is needed.  */
  else if (return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    ;
  /* When we do not want to return a value, we need to construct a
     new return block with an empty return statement.
     FIXME: Once we are able to change the return type, we should change the
     function to return void instead of just outputting a function with an
     undefined return value.  For structures this affects quality of
     codegen.  */
  else if ((retval = find_retval (return_bb))
           && !split_point->split_part_set_retval)
    {
      bool redirected = true;
      basic_block new_return_bb = create_basic_block (NULL, 0, return_bb);
      gimple_stmt_iterator gsi = gsi_start_bb (new_return_bb);
      gsi_insert_after (&gsi, gimple_build_return (NULL), GSI_NEW_STMT);
      new_return_bb->count = profile_count::zero ();
      while (redirected)
        {
          redirected = false;
          FOR_EACH_EDGE (e, ei, return_bb->preds)
            if (bitmap_bit_p (split_point->split_bbs, e->src->index))
              {
                new_return_bb->count += e->count ();
                redirect_edge_and_branch (e, new_return_bb);
                redirected = true;
                break;
              }
        }
      e = make_single_succ_edge (new_return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
      add_bb_to_loop (new_return_bb, current_loops->tree_root);
      bitmap_set_bit (split_point->split_bbs, new_return_bb->index);
    }
  /* When we pass around the value, use the existing return block.  */
  else
    bitmap_set_bit (split_point->split_bbs, return_bb->index);

  /* If RETURN_BB has virtual operand PHIs, they must be removed and the
     virtual operand marked for renaming as we change the CFG in a way that
     tree-inline is not able to compensate for.

     Note this can happen whether or not we have a return value.  If we have
     a return value, then RETURN_BB may have PHIs for real operands too.  */
  if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      bool phi_p = false;
      for (gphi_iterator gsi = gsi_start_phis (return_bb);
           !gsi_end_p (gsi);)
        {
          gphi *stmt = gsi.phi ();
          if (!virtual_operand_p (gimple_phi_result (stmt)))
            {
              gsi_next (&gsi);
              continue;
            }
          mark_virtual_phi_result_for_renaming (stmt);
          remove_phi_node (&gsi, true);
          phi_p = true;
        }
      /* In reality we have to rename the reaching definition of the
         virtual operand at return_bb as we will eventually release it
         when we remove the code region we outlined.
         So we have to rename all immediate virtual uses of that region
         if we didn't see a PHI definition yet.  */
      /* ??? In real reality we want to set the reaching vdef of the
         entry of the SESE region as the vuse of the call and the reaching
         vdef of the exit of the SESE region as the vdef of the call.  */
      if (!phi_p)
        for (gimple_stmt_iterator gsi = gsi_start_bb (return_bb);
             !gsi_end_p (gsi);
             gsi_next (&gsi))
          {
            gimple *stmt = gsi_stmt (gsi);
            if (gimple_vuse (stmt))
              {
                gimple_set_vuse (stmt, NULL_TREE);
                update_stmt (stmt);
              }
            if (gimple_vdef (stmt))
              break;
          }
    }

  /* Now create the actual clone.  */
  cgraph_edge::rebuild_edges ();
  node = cur_node->create_version_clone_with_body
    (vNULL, NULL, args_to_skip,
     !split_part_return_p || !split_point->split_part_set_retval,
     split_point->split_bbs, split_point->entry_bb, "part");

  node->split_part = true;

  if (cur_node->same_comdat_group)
    {
      /* TODO: call is versionable if we make sure that all
         callers are inside of a comdat group.  */
      cur_node->calls_comdat_local = 1;
      node->add_to_same_comdat_group (cur_node);
    }

  /* Let's take a time profile for the split function.  */
  node->tp_first_run = cur_node->tp_first_run + 1;

  /* For usual cloning it is enough to clear the builtin only when the
     signature changes.  For partial inlining we however can not expect the
     part of the builtin implementation to have the same semantics as the
     whole.  */
  if (DECL_BUILT_IN (node->decl))
    {
      DECL_BUILT_IN_CLASS (node->decl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (node->decl) = (enum built_in_function) 0;
    }

  /* If return_bb contains any clobbers that refer to SSA_NAMEs
     set in the split part, remove them.  Also reset debug stmts that
     refer to SSA_NAMEs set in the split part.  */
  if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      gimple_stmt_iterator gsi = gsi_start_bb (return_bb);
      while (!gsi_end_p (gsi))
        {
          tree op;
          ssa_op_iter iter;
          gimple *stmt = gsi_stmt (gsi);
          bool remove = false;
          if (gimple_clobber_p (stmt) || is_gimple_debug (stmt))
            FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
              {
                basic_block bb = gimple_bb (SSA_NAME_DEF_STMT (op));
                if (op != retval
                    && bb
                    && bb != return_bb
                    && bitmap_bit_p (split_point->split_bbs, bb->index))
                  {
                    if (is_gimple_debug (stmt))
                      {
                        gimple_debug_bind_reset_value (stmt);
                        update_stmt (stmt);
                      }
                    else
                      remove = true;
                    break;
                  }
              }
          if (remove)
            gsi_remove (&gsi, true);
          else
            gsi_next (&gsi);
        }
    }

  /* If the original function is declared inline, there is no point in issuing
     a warning for the non-inlinable part.  */
  DECL_NO_INLINE_WARNING_P (node->decl) = 1;
  cur_node->remove_callees ();
  cur_node->remove_all_references ();
  if (!split_part_return_p)
    TREE_THIS_VOLATILE (node->decl) = 1;
  if (dump_file)
    dump_function_to_file (node->decl, dump_file, dump_flags);

  /* Create the basic block we place the call into.  It is the entry basic
     block split after the last label.  */
  call_bb = split_point->entry_bb;
  for (gimple_stmt_iterator gsi = gsi_start_bb (call_bb); !gsi_end_p (gsi);)
    if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
      {
        last_stmt = gsi_stmt (gsi);
        gsi_next (&gsi);
      }
    else
      break;
  call_bb->count = split_point->count;
  e = split_block (split_point->entry_bb, last_stmt);
  remove_edge (e);

  /* Produce the call statement.  */
  gimple_stmt_iterator gsi = gsi_last_bb (call_bb);
  FOR_EACH_VEC_ELT (args_to_pass, i, arg)
    if (!is_gimple_val (arg))
      {
        arg = force_gimple_operand_gsi (&gsi, arg, true, NULL_TREE,
                                        false, GSI_CONTINUE_LINKING);
        args_to_pass[i] = arg;
      }
  call = gimple_build_call_vec (node->decl, args_to_pass);
  gimple_set_block (call, DECL_INITIAL (current_function_decl));
  args_to_pass.release ();

  /* For optimized away parameters, add on the caller side
     DEBUG D#X => parm_Y(D)
     stmts and associate D#X with parm in the decl_debug_args_lookup
     vector to say for debug info that if parameter parm had been passed,
     it would have value parm_Y(D).  */
  if (args_to_skip)
    {
      vec<tree, va_gc> **debug_args = NULL;
      unsigned i = 0, len = 0;
      if (MAY_HAVE_DEBUG_BIND_STMTS)
        {
          debug_args = decl_debug_args_lookup (node->decl);
          if (debug_args != NULL)
            len = vec_safe_length (*debug_args);
        }
      for (parm = DECL_ARGUMENTS (current_function_decl), num = 0;
           parm; parm = DECL_CHAIN (parm), num++)
        if (bitmap_bit_p (args_to_skip, num) && is_gimple_reg (parm))
          {
            tree ddecl;
            gimple *def_temp;

            /* This needs to be done even without
               MAY_HAVE_DEBUG_BIND_STMTS, otherwise if it didn't exist
               before, we'd end up with different SSA_NAME_VERSIONs
               between -g and -g0.  */
            arg = get_or_create_ssa_default_def (cfun, parm);
            if (!MAY_HAVE_DEBUG_BIND_STMTS || debug_args == NULL)
              continue;

            while (i < len && (**debug_args)[i] != DECL_ORIGIN (parm))
              i += 2;
            if (i >= len)
              continue;
            ddecl = (**debug_args)[i + 1];
            def_temp
              = gimple_build_debug_bind (ddecl, unshare_expr (arg), call);
            gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
          }
    }

  /* We avoid the address being taken on any variable used by the split part,
     so return slot optimization is always possible.  Moreover this is
     required to make DECL_BY_REFERENCE work.  */
  if (aggregate_value_p (DECL_RESULT (current_function_decl),
                         TREE_TYPE (current_function_decl))
      && (!is_gimple_reg_type (TREE_TYPE (DECL_RESULT (current_function_decl)))
          || DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))))
    gimple_call_set_return_slot_opt (call, true);

  if (add_tsan_func_exit)
    tsan_func_exit_call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);

  /* Update the return value.  This is a bit tricky.  When we do not return,
     do nothing.  When we return we might need to update return_bb
     or produce a new return statement.  */
  if (!split_part_return_p)
    {
      gsi_insert_after (&gsi, call, GSI_NEW_STMT);
      if (tsan_func_exit_call)
        gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
    }
  else
    {
      e = make_single_succ_edge (call_bb, return_bb,
                                 return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
                                 ? 0 : EDGE_FALLTHRU);

      /* If there is a return basic block, see what value we need to store
         the return value into and put the call just before it.  */
      if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
        {
          real_retval = retval;
          if (real_retval && split_point->split_part_set_retval)
            {
              gphi_iterator psi;

              /* See if we need a new SSA_NAME for the result.
                 When DECL_BY_REFERENCE is true, retval is actually a pointer
                 to the return value and it is constant in the whole
                 function.  */
              if (TREE_CODE (retval) == SSA_NAME
                  && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
                {
                  retval = copy_ssa_name (retval, call);

                  /* See if there is a PHI defining the return value.  */
                  for (psi = gsi_start_phis (return_bb);
                       !gsi_end_p (psi); gsi_next (&psi))
                    if (!virtual_operand_p (gimple_phi_result (psi.phi ())))
                      break;

                  /* When there is a PHI, just update its value.  */
                  if (TREE_CODE (retval) == SSA_NAME
                      && !gsi_end_p (psi))
                    add_phi_arg (psi.phi (), retval, e, UNKNOWN_LOCATION);
                  /* Otherwise update the return BB itself.
                     find_return_bb allows at most one assignment to the
                     return value, so update the first statement.  */
                  else
                    {
                      gimple_stmt_iterator bsi;
                      for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
                           gsi_next (&bsi))
                        if (greturn *return_stmt
                              = dyn_cast <greturn *> (gsi_stmt (bsi)))
                          {
                            gimple_return_set_retval (return_stmt, retval);
                            break;
                          }
                        else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
                                 && !gimple_clobber_p (gsi_stmt (bsi)))
                          {
                            gimple_assign_set_rhs1 (gsi_stmt (bsi), retval);
                            break;
                          }
                      update_stmt (gsi_stmt (bsi));
                      /* Also adjust clobbers and debug stmts in return_bb.  */
                      for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
                           gsi_next (&bsi))
                        {
                          gimple *stmt = gsi_stmt (bsi);
                          if (gimple_clobber_p (stmt)
                              || is_gimple_debug (stmt))
                            {
                              ssa_op_iter iter;
                              use_operand_p use_p;
                              bool update = false;
                              FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
                                                        SSA_OP_USE)
                                if (USE_FROM_PTR (use_p) == real_retval)
                                  {
                                    SET_USE (use_p, retval);
                                    update = true;
                                  }
                              if (update)
                                update_stmt (stmt);
                            }
                        }
                    }
                }
              if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
                {
                  gimple_call_set_lhs (call, build_simple_mem_ref (retval));
                  gsi_insert_after (&gsi, call, GSI_NEW_STMT);
                }
              else
                {
                  tree restype;
                  restype = TREE_TYPE (DECL_RESULT (current_function_decl));
                  gsi_insert_after (&gsi, call, GSI_NEW_STMT);
                  if (!useless_type_conversion_p (TREE_TYPE (retval), restype))
                    {
                      gimple *cpy;
                      tree tem = create_tmp_reg (restype);
                      tem = make_ssa_name (tem, call);
                      cpy = gimple_build_assign (retval, NOP_EXPR, tem);
                      gsi_insert_after (&gsi, cpy, GSI_NEW_STMT);
                      retval = tem;
                    }
                  gimple_call_set_lhs (call, retval);
                  update_stmt (call);
                }
            }
          else
            gsi_insert_after (&gsi, call, GSI_NEW_STMT);
          if (tsan_func_exit_call)
            gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
        }
      /* We don't use the return block (there is either no return in the
         function or multiple of them).  So create a new basic block with a
         return statement.  */
      else
        {
          greturn *ret;
          if (split_point->split_part_set_retval
              && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
            {
              retval = DECL_RESULT (current_function_decl);

              /* We use a temporary register to hold the value when
                 aggregate_value_p is false.  Similarly for DECL_BY_REFERENCE
                 we must avoid an extra copy.  */
              if (!aggregate_value_p (retval, TREE_TYPE (current_function_decl))
                  && !DECL_BY_REFERENCE (retval))
                retval = create_tmp_reg (TREE_TYPE (retval));
              if (is_gimple_reg (retval))
                {
                  /* When returning by reference, there is only one SSA name
                     assigned to RESULT_DECL (that is the pointer to the return
                     value).  Look it up or create a new one if it is
                     missing.  */
                  if (DECL_BY_REFERENCE (retval))
                    retval = get_or_create_ssa_default_def (cfun, retval);
                  /* Otherwise produce a new SSA name for the return value.  */
                  else
                    retval = make_ssa_name (retval, call);
                }
              if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
                gimple_call_set_lhs (call, build_simple_mem_ref (retval));
              else
                gimple_call_set_lhs (call, retval);
              gsi_insert_after (&gsi, call, GSI_NEW_STMT);
            }
          else
            {
              gsi_insert_after (&gsi, call, GSI_NEW_STMT);
              if (retval
                  && is_gimple_reg_type (TREE_TYPE (retval))
                  && !is_gimple_val (retval))
                {
                  gassign *g
                    = gimple_build_assign (make_ssa_name (TREE_TYPE (retval)),
                                           retval);
                  retval = gimple_assign_lhs (g);
                  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
                }
            }
          if (tsan_func_exit_call)
            gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
          ret = gimple_build_return (retval);
          gsi_insert_after (&gsi, ret, GSI_NEW_STMT);
        }
    }
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  compute_fn_summary (node, true);
}
/* Execute function splitting pass.  */

static unsigned int
execute_split_functions (void)
{
  gimple_stmt_iterator bsi;
  basic_block bb;
  sreal overall_time = 0;
  int overall_size = 0;
  int todo = 0;
  struct cgraph_node *node = cgraph_node::get (current_function_decl);

  if (flags_from_decl_or_type (current_function_decl)
      & (ECF_NORETURN|ECF_MALLOC))
    {
      if (dump_file)
        fprintf (dump_file, "Not splitting: noreturn/malloc function.\n");
      return 0;
    }
  if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
    {
      if (dump_file)
        fprintf (dump_file, "Not splitting: main function.\n");
      return 0;
    }
  if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
    {
      if (dump_file)
        fprintf (dump_file, "Not splitting: function is unlikely executed.\n");
      return 0;
    }
  /* This can be relaxed; the function might become inlinable after splitting
     away the uninlinable part.  */
  if (ipa_fn_summaries
      && !ipa_fn_summaries->get_create (node)->inlinable)
    {
      if (dump_file)
        fprintf (dump_file, "Not splitting: not inlinable.\n");
      return 0;
    }
  if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
    {
      if (dump_file)
        fprintf (dump_file, "Not splitting: disregarding inline limits.\n");
      return 0;
    }
  /* This can be relaxed; most of the versioning tests actually prevent
     a duplication.  */
  if (!tree_versionable_function_p (current_function_decl))
    {
      if (dump_file)
        fprintf (dump_file, "Not splitting: not versionable.\n");
      return 0;
    }
  /* FIXME: we could support this.  */
  if (DECL_STRUCT_FUNCTION (current_function_decl)->static_chain_decl)
    {
      if (dump_file)
        fprintf (dump_file, "Not splitting: nested function.\n");
      return 0;
    }

  /* See if it makes sense to try to split.
     It makes sense to split if we inline, that is if we have direct calls to
     handle or direct calls are possibly going to appear as a result of
     indirect inlining or LTO.  Also handle -fprofile-generate as LTO to allow
     non-LTO training for an LTO -fprofile-use build.

     Note that we are not completely conservative about disqualifying
     functions called once.  It is possible that the caller is called more
     than once and then inlining would still benefit.  */
  if ((!node->callers
       /* Local functions called once will be completely inlined most of the
          time.  */
       || (!node->callers->next_caller && node->local.local))
      && !node->address_taken
      && !node->has_aliases_p ()
      && (!flag_lto || !node->externally_visible))
    {
      if (dump_file)
        fprintf (dump_file, "Not splitting: not called directly "
                 "or called once.\n");
      return 0;
    }

  /* FIXME: We can actually split if splitting reduces call overhead.  */
  if (!flag_inline_small_functions
      && !DECL_DECLARED_INLINE_P (current_function_decl))
    {
      if (dump_file)
        fprintf (dump_file, "Not splitting: not autoinlining and function"
                 " is not inline.\n");
      return 0;
    }

  /* We enforce splitting after loop headers when profile info is not
     available.  */
  if (profile_status_for_fn (cfun) != PROFILE_READ)
    mark_dfs_back_edges ();

  /* Initialize bitmap to track forbidden calls.  */
  forbidden_dominators = BITMAP_ALLOC (NULL);
  calculate_dominance_info (CDI_DOMINATORS);

  /* Compute local info about basic blocks and determine function
     size/time.  */
  bb_info_vec.safe_grow_cleared (last_basic_block_for_fn (cfun) + 1);
  best_split_point.split_bbs = NULL;
  basic_block return_bb = find_return_bb ();
  int tsan_exit_found = -1;
  FOR_EACH_BB_FN (bb, cfun)
    {
      sreal time = 0;
      int size = 0;
      sreal freq = bb->count.to_sreal_scale
                     (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Basic block %i\n", bb->index);

      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
          sreal this_time;
          int this_size;
          gimple *stmt = gsi_stmt (bsi);

          this_size = estimate_num_insns (stmt, &eni_size_weights);
          this_time = (sreal)estimate_num_insns (stmt, &eni_time_weights)
                        * freq;
          size += this_size;
          time += this_time;

          check_forbidden_calls (stmt);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "  freq:%4.2f size:%3i time:%4.2f ",
                       freq.to_double (), this_size, this_time.to_double ());
              print_gimple_stmt (dump_file, stmt, 0);
            }

          if ((flag_sanitize & SANITIZE_THREAD)
              && gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
            {
              /* We handle TSAN_FUNC_EXIT for splitting either in the
                 return_bb, or in its immediate predecessors.  */
              if ((bb != return_bb && !find_edge (bb, return_bb))
                  || (tsan_exit_found != -1
                      && tsan_exit_found != (bb != return_bb)))
                {
                  if (dump_file)
                    fprintf (dump_file, "Not splitting: TSAN_FUNC_EXIT"
                             " in unexpected basic block.\n");
                  BITMAP_FREE (forbidden_dominators);
                  bb_info_vec.release ();
                  return 0;
                }
              tsan_exit_found = bb != return_bb;
            }
        }
      overall_time += time;
      overall_size += size;
      bb_info_vec[bb->index].time = time;
      bb_info_vec[bb->index].size = size;
    }
  find_split_points (return_bb, overall_time, overall_size);
  if (best_split_point.split_bbs)
    {
      split_function (return_bb, &best_split_point, tsan_exit_found == 1);
      BITMAP_FREE (best_split_point.ssa_names_to_pass);
      BITMAP_FREE (best_split_point.split_bbs);
      todo = TODO_update_ssa | TODO_cleanup_cfg;
    }
  BITMAP_FREE (forbidden_dominators);
  bb_info_vec.release ();
  return todo;
}
namespace {

const pass_data pass_data_split_functions =
{
  GIMPLE_PASS, /* type */
  "fnsplit", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FNSPLIT, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_functions : public gimple_opt_pass
{
public:
  pass_split_functions (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_functions, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return execute_split_functions ();
    }

}; // class pass_split_functions

bool
pass_split_functions::gate (function *)
{
  /* When doing profile feedback, we want to execute the pass after profiling
     is read.  So disable one in early optimization.  */
  return (flag_partial_inlining
          && !profile_arc_flag && !flag_branch_probabilities);
}

} // anon namespace

gimple_opt_pass *
make_pass_split_functions (gcc::context *ctxt)
{
  return new pass_split_functions (ctxt);
}
/* Execute function splitting pass with profile feedback.  */

static unsigned int
execute_feedback_split_functions (void)
{
  unsigned int retval = execute_split_functions ();
  if (retval)
    retval |= TODO_rebuild_cgraph_edges;
  return retval;
}

namespace {

const pass_data pass_data_feedback_split_functions =
{
  GIMPLE_PASS, /* type */
  "feedback_fnsplit", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FNSPLIT, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_feedback_split_functions : public gimple_opt_pass
{
public:
  pass_feedback_split_functions (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_feedback_split_functions, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return execute_feedback_split_functions ();
    }

}; // class pass_feedback_split_functions

bool
pass_feedback_split_functions::gate (function *)
{
  /* We don't need to split when profiling at all, we are producing
     lousy code anyway.  */
  return (flag_partial_inlining
          && flag_branch_probabilities);
}

} // anon namespace

gimple_opt_pass *
make_pass_feedback_split_functions (gcc::context *ctxt)
{
  return new pass_feedback_split_functions (ctxt);
}