/* Function splitting pass
   Copyright (C) 2010-2016 Free Software Foundation, Inc.
   Contributed by Jan Hubicka  <jh@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* The purpose of this pass is to split function bodies to improve
   inlining.  I.e. for a function of the form:
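
     func (...)
       {
	 if (cheap_test)
	   something_small
	 else
	   something_big
       }

   the pass produces, roughly (illustrative sketch; names are schematic):

     func.part (...)
       {
	 something_big
       }

     func (...)
       {
	 if (cheap_test)
	   something_small
	 else
	   func.part (...);
       }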
   When func becomes inlinable and when cheap_test is often true, inlining
   func, but not func.part, leads to a performance improvement similar to
   inlining the original func, while the code size growth is smaller.

   The pass is organized in three stages:
   1) Collect local info about basic blocks into the BB_INFO structure and
      compute the estimated size and time of the function body.
   2) Via a DFS walk, find all possible basic blocks where we can split
      and choose the best one.
   3) If a split point is found, split at the specified BB by creating a clone
      and updating the function to call it.

   The decisions about which functions to split are made in
   execute_split_functions and consider_split.

   There are several possible future improvements for this pass, including:

   1) Splitting to break up large functions
   2) Splitting to reduce stack frame usage
   3) Allow the split part of the function to use values computed in the
      header part.  The values need to be passed to the split function,
      perhaps via the same interface as for nested functions or as arguments.
   4) Support for simple rematerialization.  I.e. when the split part uses a
      value computed in the header from a function parameter in a very cheap
      way, we can just recompute it.
   5) Support splitting of nested functions.
   6) Support non-SSA arguments.
   7) There is nothing preventing us from producing multiple parts of a single
      function when needed or splitting also the parts.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
#include "tree-chkp.h"
/* Per basic block info.  */

struct split_bb_info
{
  unsigned int size;
  int time;
};

static vec<split_bb_info> bb_info_vec;

/* Description of split point.  */

struct split_point
{
  /* Size of the partitions.  */
  unsigned int header_time, header_size, split_time, split_size;

  /* SSA names that need to be passed into the split function.  */
  bitmap ssa_names_to_pass;

  /* Basic block where we split (that will become entry point of the new
     function).  */
  basic_block entry_bb;

  /* Basic blocks we are splitting away.  */
  bitmap split_bbs;

  /* True when return value is computed on the split part and thus it needs
     to be returned.  */
  bool split_part_set_retval;
};

/* Best split point found.  */

struct split_point best_split_point;

/* Set of basic blocks that are not allowed to dominate a split point.  */

static bitmap forbidden_dominators;

static tree find_retval (basic_block return_bb);
static tree find_retbnd (basic_block return_bb);
/* Callback for walk_stmt_load_store_addr_ops.  If T is a non-SSA automatic
   variable, check whether it is present in the bitmap passed via DATA.  */

static bool
test_nonssa_use (gimple *, tree t, tree, void *data)
{
  t = get_base_address (t);

  if (!t || is_gimple_reg (t))
    return false;

  if (TREE_CODE (t) == PARM_DECL
      || (VAR_P (t)
	  && auto_var_in_fn_p (t, current_function_decl))
      || TREE_CODE (t) == RESULT_DECL
      /* Normal labels are part of CFG and will be handled gracefully.
	 Forced labels however can be used directly by statements and
	 need to stay in one partition along with their uses.  */
      || (TREE_CODE (t) == LABEL_DECL
	  && FORCED_LABEL (t)))
    return bitmap_bit_p ((bitmap)data, DECL_UID (t));

  /* For DECL_BY_REFERENCE, the return value is actually a pointer.  We want
     to pretend that the value pointed to is the actual result decl.  */
  if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
      && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
      && SSA_NAME_VAR (TREE_OPERAND (t, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
      && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    return
      bitmap_bit_p ((bitmap)data,
		    DECL_UID (DECL_RESULT (current_function_decl)));

  return false;
}
/* Dump split point CURRENT.  */

static void
dump_split_point (FILE * file, struct split_point *current)
{
  fprintf (file,
	   "Split point at BB %i\n"
	   "  header time: %i header size: %i\n"
	   "  split time: %i split size: %i\n  bbs: ",
	   current->entry_bb->index, current->header_time,
	   current->header_size, current->split_time, current->split_size);
  dump_bitmap (file, current->split_bbs);
  fprintf (file, "  SSA names to pass: ");
  dump_bitmap (file, current->ssa_names_to_pass);
}
/* Look for all BBs in header that might lead to the split part and verify
   that they are not defining any non-SSA var used by the split part.
   Parameters are the same as for consider_split.  */

static bool
verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
		     basic_block return_bb)
{
  bitmap seen = BITMAP_ALLOC (NULL);
  vec<basic_block> worklist = vNULL;
  edge e;
  edge_iterator ei;
  bool ok = true;
  basic_block bb;

  FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
    if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	&& !bitmap_bit_p (current->split_bbs, e->src->index))
      {
	worklist.safe_push (e->src);
	bitmap_set_bit (seen, e->src->index);
      }

  while (!worklist.is_empty ())
    {
      bb = worklist.pop ();
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	    && bitmap_set_bit (seen, e->src->index))
	  {
	    gcc_checking_assert (!bitmap_bit_p (current->split_bbs,
						e->src->index));
	    worklist.safe_push (e->src);
	  }
      for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
	   gsi_next (&bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  if (is_gimple_debug (stmt))
	    continue;
	  if (walk_stmt_load_store_addr_ops
	      (stmt, non_ssa_vars, test_nonssa_use, test_nonssa_use,
	       test_nonssa_use))
	    {
	      ok = false;
	      goto done;
	    }
	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	    if (test_nonssa_use (stmt, gimple_label_label (label_stmt),
				 NULL_TREE, non_ssa_vars))
	      {
		ok = false;
		goto done;
	      }
	}
      for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
	   gsi_next (&bsi))
	{
	  if (walk_stmt_load_store_addr_ops
	      (gsi_stmt (bsi), non_ssa_vars, test_nonssa_use, test_nonssa_use,
	       test_nonssa_use))
	    {
	      ok = false;
	      goto done;
	    }
	}
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->dest != return_bb)
	    continue;
	  for (gphi_iterator bsi = gsi_start_phis (return_bb);
	       !gsi_end_p (bsi);
	       gsi_next (&bsi))
	    {
	      gphi *stmt = bsi.phi ();
	      tree op = gimple_phi_arg_def (stmt, e->dest_idx);

	      if (virtual_operand_p (gimple_phi_result (stmt)))
		continue;
	      if (TREE_CODE (op) != SSA_NAME
		  && test_nonssa_use (stmt, op, op, non_ssa_vars))
		{
		  ok = false;
		  goto done;
		}
	    }
	}
    }

  /* Verify that the rest of function does not define any label
     used by the split part.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (!bitmap_bit_p (current->split_bbs, bb->index)
	&& !bitmap_bit_p (seen, bb->index))
      {
	gimple_stmt_iterator bsi;
	for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	  if (glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (bsi)))
	    if (test_nonssa_use (label_stmt,
				 gimple_label_label (label_stmt),
				 NULL_TREE, non_ssa_vars))
	      {
		ok = false;
		goto done;
	      }
      }

done:
  BITMAP_FREE (seen);
  worklist.release ();
  return ok;
}
/* If STMT is a call, check the callee against a list of forbidden
   predicate functions.  If a match is found, look for uses of the
   call result in condition statements that compare against zero.
   For each such use, find the block targeted by the condition
   statement for the nonzero result, and set the bit for this block
   in the forbidden dominators bitmap.  The purpose of this is to avoid
   selecting a split point where we are likely to lose the chance
   to optimize away an unused function call.  */
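
/* As an illustrative sketch (schematic GIMPLE, not from a particular
   testcase), given

     _1 = __builtin_constant_p (x_2);
     if (_1 != 0)
       goto <bb A>;	// taken when the predicate is nonzero
     else
       goto <bb B>;

   bb A is recorded in FORBIDDEN_DOMINATORS, so consider_split will refuse
   any split point dominated by it; otherwise the builtin might never fold
   away and the guarded call would be kept unnecessarily.  */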
static void
check_forbidden_calls (gimple *stmt)
{
  imm_use_iterator use_iter;
  use_operand_p use_p;
  tree lhs;

  /* At the moment, __builtin_constant_p is the only forbidden
     predicate function call (see PR49642).  */
  if (!gimple_call_builtin_p (stmt, BUILT_IN_CONSTANT_P))
    return;

  lhs = gimple_call_lhs (stmt);

  if (!lhs || TREE_CODE (lhs) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_FAST (use_p, use_iter, lhs)
    {
      tree op1;
      basic_block use_bb, forbidden_bb;
      enum tree_code code;
      edge true_edge, false_edge;
      gcond *use_stmt;

      use_stmt = dyn_cast <gcond *> (USE_STMT (use_p));
      if (!use_stmt)
	continue;

      /* Assuming canonical form for GIMPLE_COND here, with constant
	 in second position.  */
      op1 = gimple_cond_rhs (use_stmt);
      code = gimple_cond_code (use_stmt);
      use_bb = gimple_bb (use_stmt);

      extract_true_false_edges_from_block (use_bb, &true_edge, &false_edge);

      /* We're only interested in comparisons that distinguish
	 unambiguously from zero.  */
      if (!integer_zerop (op1) || code == LE_EXPR || code == GE_EXPR)
	continue;

      if (code == EQ_EXPR)
	forbidden_bb = false_edge->dest;
      else
	forbidden_bb = true_edge->dest;

      bitmap_set_bit (forbidden_dominators, forbidden_bb->index);
    }
}
/* If BB is dominated by any block in the forbidden dominators set,
   return TRUE; else FALSE.  */

static bool
dominated_by_forbidden (basic_block bb)
{
  unsigned dom_bb;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (forbidden_dominators, 1, dom_bb, bi)
    {
      if (dominated_by_p (CDI_DOMINATORS, bb,
			  BASIC_BLOCK_FOR_FN (cfun, dom_bb)))
	return true;
    }

  return false;
}
/* For a given split point CURRENT and return block RETURN_BB, return 1
   if ssa name VAL is set by the split part and 0 otherwise.  */
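
/* For instance (schematic GIMPLE): if RETURN_BB contains

     x_5 = foo (a_2);
     return x_5;

   and the defining statement of x_5 lies in one of CURRENT->split_bbs (or
   in RETURN_BB itself), the predicate below is true and the split part has
   to compute the return value and pass it back to the header.  */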
static bool
split_part_set_ssa_name_p (tree val, struct split_point *current,
			   basic_block return_bb)
{
  if (TREE_CODE (val) != SSA_NAME)
    return false;

  return (!SSA_NAME_IS_DEFAULT_DEF (val)
	  && (bitmap_bit_p (current->split_bbs,
			    gimple_bb (SSA_NAME_DEF_STMT (val))->index)
	      || gimple_bb (SSA_NAME_DEF_STMT (val)) == return_bb));
}
/* We found a split_point CURRENT.  NON_SSA_VARS is a bitmap of all non-SSA
   variables used and RETURN_BB is the return basic block.
   See if we can split the function here.  */
static void
consider_split (struct split_point *current, bitmap non_ssa_vars,
		basic_block return_bb)
{
  tree parm;
  unsigned int num_args = 0;
  unsigned int call_overhead;
  edge e;
  edge_iterator ei;
  gphi_iterator bsi;
  unsigned int i;
  int incoming_freq = 0;
  tree retval;
  tree retbnd;
  bool back_edge = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_split_point (dump_file, current);

  FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
    {
      if (e->flags & EDGE_DFS_BACK)
	back_edge = true;
      if (!bitmap_bit_p (current->split_bbs, e->src->index))
	incoming_freq += EDGE_FREQUENCY (e);
    }

  /* Do not split when we would end up calling the function anyway.  */
  if (incoming_freq
      >= (ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
	  * PARAM_VALUE (PARAM_PARTIAL_INLINING_ENTRY_PROBABILITY) / 100))
    {
      /* When the profile is guessed, we cannot expect it to give us a
	 realistic estimate of the likelihood of the function taking the
	 complex path.  As a special case, when the tail of the function is
	 a loop, enable splitting since inlining code skipping the loop
	 is likely a noticeable win.  */
      if (back_edge
	  && profile_status_for_fn (cfun) != PROFILE_READ
	  && incoming_freq < ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "  Split before loop, accepting despite low frequencies %i %i.\n",
		     incoming_freq,
		     ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency);
	}
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "  Refused: incoming frequency is too large.\n");
	  return;
	}
    }

  if (!current->header_size)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "  Refused: header empty\n");
      return;
    }
  /* Verify that PHI args on entry are either virtual or all their operands
     incoming from the header are the same.  */
  for (bsi = gsi_start_phis (current->entry_bb); !gsi_end_p (bsi);
       gsi_next (&bsi))
    {
      gphi *stmt = bsi.phi ();
      tree val = NULL;

      if (virtual_operand_p (gimple_phi_result (stmt)))
	continue;
      for (i = 0; i < gimple_phi_num_args (stmt); i++)
	{
	  edge e = gimple_phi_arg_edge (stmt, i);
	  if (!bitmap_bit_p (current->split_bbs, e->src->index))
	    {
	      tree edge_val = gimple_phi_arg_def (stmt, i);
	      if (val && edge_val != val)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file,
			     "  Refused: entry BB has PHI with multiple variants\n");
		  return;
		}
	      val = edge_val;
	    }
	}
    }
  /* See what arguments we will pass to the split function and compute the
     call overhead.  */
  call_overhead = eni_size_weights.call_cost;
  for (parm = DECL_ARGUMENTS (current_function_decl); parm;
       parm = DECL_CHAIN (parm))
    {
      if (!is_gimple_reg (parm))
	{
	  if (bitmap_bit_p (non_ssa_vars, DECL_UID (parm)))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "  Refused: need to pass non-ssa param values\n");
	      return;
	    }
	}
      else
	{
	  tree ddef = ssa_default_def (cfun, parm);
	  if (ddef
	      && bitmap_bit_p (current->ssa_names_to_pass,
			       SSA_NAME_VERSION (ddef)))
	    {
	      if (!VOID_TYPE_P (TREE_TYPE (parm)))
		call_overhead += estimate_move_cost (TREE_TYPE (parm), false);
	      num_args++;
	    }
	}
    }
  if (!VOID_TYPE_P (TREE_TYPE (current_function_decl)))
    call_overhead += estimate_move_cost (TREE_TYPE (current_function_decl),
					 false);

  if (current->split_size <= call_overhead)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: split size is smaller than call overhead\n");
      return;
    }
  if (current->header_size + call_overhead
      >= (unsigned int)(DECL_DECLARED_INLINE_P (current_function_decl)
			? MAX_INLINE_INSNS_SINGLE
			: MAX_INLINE_INSNS_AUTO))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: header size is too large for inline candidate\n");
      return;
    }
  /* Splitting functions brings the target out of its comdat group; this will
     lead to code duplication if the function is reused by another unit.
     Limit this duplication.  This is consistent with the limit in tree-sra.c.
     FIXME: with LTO we ought to be able to do better!  */
  if (DECL_ONE_ONLY (current_function_decl)
      && current->split_size >= (unsigned int) MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: function is COMDAT and tail is too large\n");
      return;
    }
  /* For comdat functions also reject very small tails; those will likely get
     inlined back and we do not want to risk the duplication overhead.
     FIXME: with LTO we ought to be able to do better!  */
  if (DECL_ONE_ONLY (current_function_decl)
      && current->split_size
	 <= (unsigned int) PARAM_VALUE (PARAM_EARLY_INLINING_INSNS) / 2)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: function is COMDAT and tail is too small\n");
      return;
    }

  /* FIXME: we currently can pass only SSA function parameters to the split
     arguments.  Once the parm_adjustment infrastructure is supported by
     cloning, we can pass more than that.  */
  if (num_args != bitmap_count_bits (current->ssa_names_to_pass))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: need to pass non-param values\n");
      return;
    }
  /* When there are non-ssa vars used in the split region, see if they
     are used in the header region.  If so, reject the split.
     FIXME: we can use nested function support to access both.  */
  if (!bitmap_empty_p (non_ssa_vars)
      && !verify_non_ssa_vars (current, non_ssa_vars, return_bb))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: split part has non-ssa uses\n");
      return;
    }

  /* If the split point is dominated by a forbidden block, reject
     the split.  */
  if (!bitmap_empty_p (forbidden_dominators)
      && dominated_by_forbidden (current->entry_bb))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: split point dominated by forbidden block\n");
      return;
    }
  /* See if the retval used by the return bb is computed by the header or by
     the split part.  When it is computed by the split part, we need to
     produce a return statement in the split part and add code to the header
     to pass it around.

     This is a bit tricky to test:
       1) When there is no return_bb or no return value, we always pass the
	  value around.
       2) Invariants are always computed by the caller.
       3) For SSA we need to look whether the defining statement is in the
	  header or in the split part.
       4) For non-SSA we need to look where the var is computed.  */
  retval = find_retval (return_bb);
  if (!retval)
    {
      /* If there is a return_bb with no return value in a function returning
	 a value by reference, also make the split part return void; otherwise
	 the expansion would try to create a non-POD temporary, which is
	 invalid.  */
      if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && DECL_RESULT (current_function_decl)
	  && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
	current->split_part_set_retval = false;
      else
	current->split_part_set_retval = true;
    }
  else if (is_gimple_min_invariant (retval))
    current->split_part_set_retval = false;
  /* Special case is a value returned by reference that we record as if it
     was a non-ssa set to result_decl.  */
  else if (TREE_CODE (retval) == SSA_NAME
	   && SSA_NAME_VAR (retval)
	   && TREE_CODE (SSA_NAME_VAR (retval)) == RESULT_DECL
	   && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    current->split_part_set_retval
      = bitmap_bit_p (non_ssa_vars, DECL_UID (SSA_NAME_VAR (retval)));
  else if (TREE_CODE (retval) == SSA_NAME)
    current->split_part_set_retval
      = split_part_set_ssa_name_p (retval, current, return_bb);
  else if (TREE_CODE (retval) == PARM_DECL)
    current->split_part_set_retval = false;
  else if (VAR_P (retval)
	   || TREE_CODE (retval) == RESULT_DECL)
    current->split_part_set_retval
      = bitmap_bit_p (non_ssa_vars, DECL_UID (retval));
  else
    current->split_part_set_retval = true;
  /* See if the retbnd used by the return bb is computed by the header or
     the split part.  */
  retbnd = find_retbnd (return_bb);
  if (retbnd)
    {
      bool split_part_set_retbnd
	= split_part_set_ssa_name_p (retbnd, current, return_bb);

      /* If we have both return value and bounds then keep their definitions
	 in a single function.  We use SSA names to link returned bounds and
	 value and therefore do not handle cases when result is passed by
	 reference (which should not be our case anyway since bounds are
	 returned for pointers only).  */
      if ((DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))
	   && current->split_part_set_retval)
	  || split_part_set_retbnd != current->split_part_set_retval)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "  Refused: split point splits return value and bounds\n");
	  return;
	}
    }
  /* split_function fixes up at most one non-virtual PHI node in return_bb,
     for the return value.  If there are other PHIs, give up.  */
  if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      gphi_iterator psi;

      for (psi = gsi_start_phis (return_bb); !gsi_end_p (psi); gsi_next (&psi))
	if (!virtual_operand_p (gimple_phi_result (psi.phi ()))
	    && !(retval
		 && current->split_part_set_retval
		 && TREE_CODE (retval) == SSA_NAME
		 && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))
		 && SSA_NAME_DEF_STMT (retval) == psi.phi ()))
	  {
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file,
		       "  Refused: return bb has extra PHIs\n");
	    return;
	  }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "  Accepted!\n");
  /* At the moment choose the split point with the lowest frequency and that
     leaves out the smallest size of header.
     In future we might re-consider this heuristic.  */
  if (!best_split_point.split_bbs
      || best_split_point.entry_bb->frequency > current->entry_bb->frequency
      || (best_split_point.entry_bb->frequency == current->entry_bb->frequency
	  && best_split_point.split_size < current->split_size))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "  New best split point!\n");
      if (best_split_point.ssa_names_to_pass)
	{
	  BITMAP_FREE (best_split_point.ssa_names_to_pass);
	  BITMAP_FREE (best_split_point.split_bbs);
	}
      best_split_point = *current;
      best_split_point.ssa_names_to_pass = BITMAP_ALLOC (NULL);
      bitmap_copy (best_split_point.ssa_names_to_pass,
		   current->ssa_names_to_pass);
      best_split_point.split_bbs = BITMAP_ALLOC (NULL);
      bitmap_copy (best_split_point.split_bbs, current->split_bbs);
    }
}
/* Return the basic block containing the RETURN statement.  We allow basic
   blocks of the form:

     <retval> = tem_N;
     return <retval>;

   but return_bb cannot be more complex than this (except for
   -fsanitize=thread, where we allow a TSAN_FUNC_EXIT () internal call in
   there).  If nothing is found, return the exit block.

   When there are multiple RETURN statements, choose the one with a return
   value, since that one is more likely shared by multiple code paths.

   The return BB is special, because for function splitting it is the only
   basic block that is duplicated in between the header and the split part
   of the function.

   TODO: We might support multiple return blocks.  */
static basic_block
find_return_bb (void)
{
  edge e;
  basic_block return_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  gimple_stmt_iterator bsi;
  bool found_return = false;
  tree retval = NULL_TREE;

  if (!single_pred_p (EXIT_BLOCK_PTR_FOR_FN (cfun)))
    return return_bb;

  e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun));
  for (bsi = gsi_last_bb (e->src); !gsi_end_p (bsi); gsi_prev (&bsi))
    {
      gimple *stmt = gsi_stmt (bsi);
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || is_gimple_debug (stmt)
	  || gimple_clobber_p (stmt))
	;
      else if (gimple_code (stmt) == GIMPLE_ASSIGN
	       && found_return
	       && gimple_assign_single_p (stmt)
	       && (auto_var_in_fn_p (gimple_assign_rhs1 (stmt),
				     current_function_decl)
		   || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
	       && retval == gimple_assign_lhs (stmt))
	;
      else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
	{
	  found_return = true;
	  retval = gimple_return_retval (return_stmt);
	}
      /* For -fsanitize=thread, allow also TSAN_FUNC_EXIT () in the return
	 bb.  */
      else if ((flag_sanitize & SANITIZE_THREAD)
	       && gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
	;
      else
	break;
    }
  if (gsi_end_p (bsi) && found_return)
    return_bb = e->src;

  return return_bb;
}
/* Given return basic block RETURN_BB, see where the return value is really
   stored.  */

static tree
find_retval (basic_block return_bb)
{
  gimple_stmt_iterator bsi;
  for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
    if (greturn *return_stmt = dyn_cast <greturn *> (gsi_stmt (bsi)))
      return gimple_return_retval (return_stmt);
    else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
	     && !gimple_clobber_p (gsi_stmt (bsi)))
      return gimple_assign_rhs1 (gsi_stmt (bsi));
  return NULL;
}
/* Given return basic block RETURN_BB, see where the return bounds are really
   stored.  */

static tree
find_retbnd (basic_block return_bb)
{
  gimple_stmt_iterator bsi;
  for (bsi = gsi_last_bb (return_bb); !gsi_end_p (bsi); gsi_prev (&bsi))
    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
      return gimple_return_retbnd (gsi_stmt (bsi));
  return NULL;
}
/* Callback for walk_stmt_load_store_addr_ops.  If T is a non-SSA automatic
   variable, mark it as used in the bitmap passed via DATA.
   Return true when access to T prevents splitting the function.  */

static bool
mark_nonssa_use (gimple *, tree t, tree, void *data)
{
  t = get_base_address (t);

  if (!t || is_gimple_reg (t))
    return false;

  /* At present we can't pass non-SSA arguments to the split function.
     FIXME: this can be relaxed by passing references to arguments.  */
  if (TREE_CODE (t) == PARM_DECL)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Cannot split: use of non-ssa function parameter.\n");
      return true;
    }

  if ((VAR_P (t) && auto_var_in_fn_p (t, current_function_decl))
      || TREE_CODE (t) == RESULT_DECL
      || (TREE_CODE (t) == LABEL_DECL && FORCED_LABEL (t)))
    bitmap_set_bit ((bitmap)data, DECL_UID (t));

  /* For DECL_BY_REFERENCE, the return value is actually a pointer.  We want
     to pretend that the value pointed to is the actual result decl.  */
  if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
      && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
      && SSA_NAME_VAR (TREE_OPERAND (t, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
      && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    return
      bitmap_bit_p ((bitmap)data,
		    DECL_UID (DECL_RESULT (current_function_decl)));

  return false;
}
/* Compute local properties of basic block BB that we collect when looking
   for split points.  We look for ssa defs and store them in SET_SSA_NAMES,
   for ssa uses and store them in USED_SSA_NAMES, and for any non-SSA
   automatic vars used, which are stored in NON_SSA_VARS.

   When BB has an edge to RETURN_BB, collect uses in RETURN_BB too.

   Return false when BB contains something that prevents it from being put
   into the split part.  */

static bool
visit_bb (basic_block bb, basic_block return_bb,
	  bitmap set_ssa_names, bitmap used_ssa_names,
	  bitmap non_ssa_vars)
{
  edge e;
  edge_iterator ei;
  bool can_split = true;

  for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
       gsi_next (&bsi))
    {
      gimple *stmt = gsi_stmt (bsi);
      tree op;
      ssa_op_iter iter;
      tree decl;

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_clobber_p (stmt))
	continue;

      /* FIXME: We can split regions containing EH.  We cannot however
	 split RESX, EH_DISPATCH and EH_POINTER referring to the same region
	 into different partitions.  This would require tracking of
	 EH regions and checking in consider_split_point whether they
	 are not used elsewhere.  */
      if (gimple_code (stmt) == GIMPLE_RESX)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Cannot split: resx.\n");
	  can_split = false;
	}
      if (gimple_code (stmt) == GIMPLE_EH_DISPATCH)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Cannot split: eh dispatch.\n");
	  can_split = false;
	}

      /* Check builtins that prevent splitting.  */
      if (gimple_code (stmt) == GIMPLE_CALL
	  && (decl = gimple_call_fndecl (stmt)) != NULL_TREE
	  && DECL_BUILT_IN (decl)
	  && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (decl))
	  {
	  /* FIXME: once we will allow passing non-parm values to the split
	     part, we need to be sure to handle correctly builtin_stack_save
	     and builtin_stack_restore.  At the moment we are safe; there is
	     no way to store the builtin_stack_save result in a non-SSA
	     variable since all calls to those are compiler generated.  */
	  case BUILT_IN_APPLY:
	  case BUILT_IN_APPLY_ARGS:
	  case BUILT_IN_VA_START:
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file,
		       "Cannot split: builtin_apply and va_start.\n");
	    can_split = false;
	    break;
	  case BUILT_IN_EH_POINTER:
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Cannot split: builtin_eh_pointer.\n");
	    can_split = false;
	    break;
	  default:
	    break;
	  }

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
	bitmap_set_bit (set_ssa_names, SSA_NAME_VERSION (op));
      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
      can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
						   mark_nonssa_use,
						   mark_nonssa_use,
						   mark_nonssa_use);
    }
  for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
       gsi_next (&bsi))
    {
      gphi *stmt = bsi.phi ();
      unsigned int i;

      if (virtual_operand_p (gimple_phi_result (stmt)))
	continue;
      bitmap_set_bit (set_ssa_names,
		      SSA_NAME_VERSION (gimple_phi_result (stmt)));
      for (i = 0; i < gimple_phi_num_args (stmt); i++)
	{
	  tree op = gimple_phi_arg_def (stmt, i);
	  if (TREE_CODE (op) == SSA_NAME)
	    bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
	}
      can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
						   mark_nonssa_use,
						   mark_nonssa_use,
						   mark_nonssa_use);
    }
  /* Record also uses coming from the PHI operand in the return BB.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->dest == return_bb)
      {
	for (gphi_iterator bsi = gsi_start_phis (return_bb);
	     !gsi_end_p (bsi);
	     gsi_next (&bsi))
	  {
	    gphi *stmt = bsi.phi ();
	    tree op = gimple_phi_arg_def (stmt, e->dest_idx);

	    if (virtual_operand_p (gimple_phi_result (stmt)))
	      continue;
	    if (TREE_CODE (op) == SSA_NAME)
	      bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
	    else
	      can_split &= !mark_nonssa_use (stmt, op, op, non_ssa_vars);
	  }
      }
  return can_split;
}
/* Stack entry for recursive DFS walk in find_split_point.  */

struct stack_entry
{
  /* Basic block we are examining.  */
  basic_block bb;

  /* SSA names set and used by the BB and all BBs reachable
     from it via DFS walk.  */
  bitmap set_ssa_names, used_ssa_names;
  bitmap non_ssa_vars;

  /* All BBS visited from this BB via DFS walk.  */
  bitmap bbs_visited;

  /* Last examined edge in DFS walk.  Since we walk unoriented graph,
     the value is up to sum of incoming and outgoing edges of BB.  */
  unsigned int edge_num;

  /* Stack entry index of earliest BB reachable from current BB
     or any BB visited later in DFS walk.  */
  int earliest;

  /* Overall time and size of all BBs reached from this BB in DFS walk.  */
  int overall_time, overall_size;

  /* When false we can not split on this BB.  */
  bool can_split;
};
/* Find all articulations and call consider_split on them.
   OVERALL_TIME and OVERALL_SIZE are the time and size of the function.

   We perform the basic algorithm for finding an articulation in a graph
   created from the CFG by considering it to be an unoriented graph.

   The articulation is discovered via a DFS walk.  We collect the earliest
   basic block on the stack that is reachable via a backward edge.  An
   articulation is any basic block such that there is no backward edge
   bypassing it.  To reduce stack usage we maintain a heap-allocated stack
   in the STACK vector.  The AUX pointer of a BB is set to the index it
   appears at in the stack, or to -1 once it is visited and popped off the
   stack.

   The algorithm finds the articulation after visiting the whole component
   reachable by it.  This makes it convenient to collect information about
   the component used by consider_split.  */
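
/* Schematic example: in the CFG

     ENTRY -> A;  A -> B;  A -> C;  B -> D;  C -> D;  D -> EXIT

   block D is an articulation of the unoriented graph: every path between
   the entry and the exit passes through it and no back edge bypasses it.
   D is therefore a candidate split point; the component already popped off
   the DFS stack (D and everything reachable from it) would form the split
   part, while A, B and C stay in the header.  */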
static void
find_split_points (basic_block return_bb, int overall_time, int overall_size)
{
  stack_entry first;
  vec<stack_entry> stack = vNULL;
  basic_block bb;
  struct split_point current;

  current.header_time = overall_time;
  current.header_size = overall_size;
  current.split_time = 0;
  current.split_size = 0;
  current.ssa_names_to_pass = BITMAP_ALLOC (NULL);

  first.bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  first.edge_num = 0;
  first.overall_time = 0;
  first.overall_size = 0;
  first.earliest = INT_MAX;
  first.set_ssa_names = 0;
  first.used_ssa_names = 0;
  first.non_ssa_vars = 0;
  first.bbs_visited = 0;
  first.can_split = false;
  stack.safe_push (first);
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = (void *)(intptr_t)-1;
  while (!stack.is_empty ())
    {
      stack_entry *entry = &stack.last ();

      /* We are walking an acyclic graph, so edge_num counts
	 succ and pred edges together.  However when considering
	 articulation, we want to have processed everything reachable
	 from articulation but nothing that reaches into it.  */
      if (entry->edge_num == EDGE_COUNT (entry->bb->succs)
	  && entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  int pos = stack.length ();
	  entry->can_split &= visit_bb (entry->bb, return_bb,
					entry->set_ssa_names,
					entry->used_ssa_names,
					entry->non_ssa_vars);
	  if (pos <= entry->earliest && !entry->can_split
	      && dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "found articulation at bb %i but can not split\n",
		     entry->bb->index);
	  if (pos <= entry->earliest && entry->can_split)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "found articulation at bb %i\n",
			 entry->bb->index);
	      current.entry_bb = entry->bb;
	      current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
	      bitmap_and_compl (current.ssa_names_to_pass,
				entry->used_ssa_names, entry->set_ssa_names);
	      current.header_time = overall_time - entry->overall_time;
	      current.header_size = overall_size - entry->overall_size;
	      current.split_time = entry->overall_time;
	      current.split_size = entry->overall_size;
	      current.split_bbs = entry->bbs_visited;
	      consider_split (&current, entry->non_ssa_vars, return_bb);
	      BITMAP_FREE (current.ssa_names_to_pass);
	    }
	}
      /* Do the actual DFS walk.  */
      if (entry->edge_num
	  < (EDGE_COUNT (entry->bb->succs)
	     + EDGE_COUNT (entry->bb->preds)))
	{
	  edge e;
	  basic_block dest;
	  if (entry->edge_num < EDGE_COUNT (entry->bb->succs))
	    {
	      e = EDGE_SUCC (entry->bb, entry->edge_num);
	      dest = e->dest;
	    }
	  else
	    {
	      e = EDGE_PRED (entry->bb, entry->edge_num
			     - EDGE_COUNT (entry->bb->succs));
	      dest = e->src;
	    }

	  entry->edge_num++;

	  /* New BB to visit, push it to the stack.  */
	  if (dest != return_bb && dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && !dest->aux)
	    {
	      stack_entry new_entry;

	      new_entry.bb = dest;
	      new_entry.edge_num = 0;
	      new_entry.overall_time
		= bb_info_vec[dest->index].time;
	      new_entry.overall_size
		= bb_info_vec[dest->index].size;
	      new_entry.earliest = INT_MAX;
	      new_entry.set_ssa_names = BITMAP_ALLOC (NULL);
	      new_entry.used_ssa_names = BITMAP_ALLOC (NULL);
	      new_entry.bbs_visited = BITMAP_ALLOC (NULL);
	      new_entry.non_ssa_vars = BITMAP_ALLOC (NULL);
	      new_entry.can_split = true;
	      bitmap_set_bit (new_entry.bbs_visited, dest->index);
	      stack.safe_push (new_entry);
	      dest->aux = (void *)(intptr_t)stack.length ();
	    }
	  /* Back edge found, record the earliest point.  */
	  else if ((intptr_t)dest->aux > 0
		   && (intptr_t)dest->aux < entry->earliest)
	    entry->earliest = (intptr_t)dest->aux;
	}
      /* We are done with examining the edges.  Pop off the value from stack
	 and merge stuff we accumulated during the walk.  */
      else if (entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  stack_entry *prev = &stack[stack.length () - 2];

	  entry->bb->aux = (void *)(intptr_t)-1;
	  prev->can_split &= entry->can_split;
	  if (prev->set_ssa_names)
	    {
	      bitmap_ior_into (prev->set_ssa_names, entry->set_ssa_names);
	      bitmap_ior_into (prev->used_ssa_names, entry->used_ssa_names);
	      bitmap_ior_into (prev->bbs_visited, entry->bbs_visited);
	      bitmap_ior_into (prev->non_ssa_vars, entry->non_ssa_vars);
	    }
	  if (prev->earliest > entry->earliest)
	    prev->earliest = entry->earliest;
	  prev->overall_time += entry->overall_time;
	  prev->overall_size += entry->overall_size;
	  BITMAP_FREE (entry->set_ssa_names);
	  BITMAP_FREE (entry->used_ssa_names);
	  BITMAP_FREE (entry->bbs_visited);
	  BITMAP_FREE (entry->non_ssa_vars);
	  stack.pop ();
	}
      else
	stack.pop ();
    }
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = NULL;
  FOR_EACH_BB_FN (bb, cfun)
    bb->aux = NULL;
  stack.release ();
  BITMAP_FREE (current.ssa_names_to_pass);
}
/* Split function at SPLIT_POINT.  */
static void
split_function (basic_block return_bb, struct split_point *split_point,
		bool add_tsan_func_exit)
{
  vec<tree> args_to_pass = vNULL;
  bitmap args_to_skip;
  tree parm;
  int num = 0;
  cgraph_node *node, *cur_node = cgraph_node::get (current_function_decl);
  basic_block call_bb;
  gcall *call, *tsan_func_exit_call = NULL;
  edge e;
  edge_iterator ei;
  tree retval = NULL, real_retval = NULL, retbnd = NULL;
  bool with_bounds = chkp_function_instrumented_p (current_function_decl);
  gimple *last_stmt = NULL;
  unsigned int i;
  tree arg, ddef;

  if (dump_file)
    {
      fprintf (dump_file, "\n\nSplitting function at:\n");
      dump_split_point (dump_file, split_point);
    }

  if (cur_node->local.can_change_signature)
    args_to_skip = BITMAP_ALLOC (NULL);
  else
    args_to_skip = NULL;

  /* Collect the parameters of the new function and the args_to_skip
     bitmap.  */
  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm; parm = DECL_CHAIN (parm), num++)
    if (args_to_skip
	&& (!is_gimple_reg (parm)
	    || (ddef = ssa_default_def (cfun, parm)) == NULL_TREE
	    || !bitmap_bit_p (split_point->ssa_names_to_pass,
			      SSA_NAME_VERSION (ddef))))
      bitmap_set_bit (args_to_skip, num);
    else
      {
	/* This parm might not have been used up to now, but is going to be
	   used, hence register it.  */
	if (is_gimple_reg (parm))
	  arg = get_or_create_ssa_default_def (cfun, parm);
	else
	  arg = parm;

	if (!useless_type_conversion_p (DECL_ARG_TYPE (parm), TREE_TYPE (arg)))
	  arg = fold_convert (DECL_ARG_TYPE (parm), arg);
	args_to_pass.safe_push (arg);
      }
  /* See if the split function will return.  */
  bool split_part_return_p = false;
  FOR_EACH_EDGE (e, ei, return_bb->preds)
    {
      if (bitmap_bit_p (split_point->split_bbs, e->src->index))
	split_part_return_p = true;
    }

  /* Add return block to what will become the split function.
     We do not return; no return block is needed.  */
  if (!split_part_return_p)
    ;
  /* We have no return block, so nothing is needed.  */
  else if (return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    ;
  /* When we do not want to return a value, we need to construct a
     new return block with an empty return statement.
     FIXME: Once we are able to change the return type, we should change the
     function to return void instead of just outputting a function with an
     undefined return value.  For structures this affects quality of
     codegen.  */
  else if ((retval = find_retval (return_bb))
	   && !split_point->split_part_set_retval)
    {
      bool redirected = true;
      basic_block new_return_bb = create_basic_block (NULL, 0, return_bb);
      gimple_stmt_iterator gsi = gsi_start_bb (new_return_bb);
      gsi_insert_after (&gsi, gimple_build_return (NULL), GSI_NEW_STMT);
      while (redirected)
	{
	  redirected = false;
	  FOR_EACH_EDGE (e, ei, return_bb->preds)
	    if (bitmap_bit_p (split_point->split_bbs, e->src->index))
	      {
		new_return_bb->count += e->count;
		new_return_bb->frequency += EDGE_FREQUENCY (e);
		redirect_edge_and_branch (e, new_return_bb);
		redirected = true;
		break;
	      }
	}
      e = make_edge (new_return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
      e->probability = REG_BR_PROB_BASE;
      e->count = new_return_bb->count;
      add_bb_to_loop (new_return_bb, current_loops->tree_root);
      bitmap_set_bit (split_point->split_bbs, new_return_bb->index);
      retbnd = find_retbnd (return_bb);
    }
  /* When we pass around the value, use the existing return block.  */
  else
    {
      bitmap_set_bit (split_point->split_bbs, return_bb->index);
      retbnd = find_retbnd (return_bb);
    }
  /* If RETURN_BB has virtual operand PHIs, they must be removed and the
     virtual operand marked for renaming as we change the CFG in a way that
     tree-inline is not able to compensate for.

     Note this can happen whether or not we have a return value.  If we have
     a return value, then RETURN_BB may have PHIs for real operands too.  */
  if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      bool phi_p = false;
      for (gphi_iterator gsi = gsi_start_phis (return_bb);
	   !gsi_end_p (gsi);)
	{
	  gphi *stmt = gsi.phi ();
	  if (!virtual_operand_p (gimple_phi_result (stmt)))
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  mark_virtual_phi_result_for_renaming (stmt);
	  remove_phi_node (&gsi, true);
	  phi_p = true;
	}
      /* In reality we have to rename the reaching definition of the
	 virtual operand at return_bb as we will eventually release it
	 when we remove the code region we outlined.
	 So we have to rename all immediate virtual uses of that region
	 if we didn't see a PHI definition yet.  */
      /* ??? In real reality we want to set the reaching vdef of the
	 entry of the SESE region as the vuse of the call and the reaching
	 vdef of the exit of the SESE region as the vdef of the call.  */
      if (!phi_p)
	for (gimple_stmt_iterator gsi = gsi_start_bb (return_bb);
	     !gsi_end_p (gsi);
	     gsi_next (&gsi))
	  {
	    gimple *stmt = gsi_stmt (gsi);
	    if (gimple_vuse (stmt))
	      {
		gimple_set_vuse (stmt, NULL_TREE);
		update_stmt (stmt);
	      }
	    if (gimple_vdef (stmt))
	      break;
	  }
    }

  /* Now create the actual clone.  */
  cgraph_edge::rebuild_edges ();
  node = cur_node->create_version_clone_with_body
    (vNULL, NULL, args_to_skip,
     !split_part_return_p || !split_point->split_part_set_retval,
     split_point->split_bbs, split_point->entry_bb, "part");

  node->split_part = true;

  /* Let's take a time profile for the split function.  */
  node->tp_first_run = cur_node->tp_first_run + 1;

  /* For usual cloning it is enough to clear the builtin only when the
     signature changes.  For partial inlining we however cannot expect a
     part of the builtin implementation to have the same semantics as the
     whole.  */
  if (DECL_BUILT_IN (node->decl))
    {
      DECL_BUILT_IN_CLASS (node->decl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (node->decl) = (enum built_in_function) 0;
    }
  /* If return_bb contains any clobbers that refer to SSA_NAMEs
     set in the split part, remove them.  Also reset debug stmts that
     refer to SSA_NAMEs set in the split part.  */
  if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      gimple_stmt_iterator gsi = gsi_start_bb (return_bb);
      while (!gsi_end_p (gsi))
	{
	  tree op;
	  ssa_op_iter iter;
	  gimple *stmt = gsi_stmt (gsi);
	  bool remove = false;
	  if (gimple_clobber_p (stmt) || is_gimple_debug (stmt))
	    FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	      {
		basic_block bb = gimple_bb (SSA_NAME_DEF_STMT (op));
		if (op != retval
		    && bb
		    && bb != return_bb
		    && bitmap_bit_p (split_point->split_bbs, bb->index))
		  {
		    if (is_gimple_debug (stmt))
		      {
			gimple_debug_bind_reset_value (stmt);
			update_stmt (stmt);
		      }
		    else
		      remove = true;
		    break;
		  }
	      }
	  if (remove)
	    gsi_remove (&gsi, true);
	  else
	    gsi_next (&gsi);
	}
    }

  /* If the original function is instrumented, then its split
     part is also instrumented.  */
  if (with_bounds)
    chkp_function_mark_instrumented (node->decl);

  /* If the original function is declared inline, there is no point in issuing
     a warning for the non-inlinable part.  */
  DECL_NO_INLINE_WARNING_P (node->decl) = 1;
  cur_node->remove_callees ();
  cur_node->remove_all_references ();
  if (!split_part_return_p)
    TREE_THIS_VOLATILE (node->decl) = 1;
  if (dump_file)
    dump_function_to_file (node->decl, dump_file, dump_flags);

  /* Create the basic block we place the call into.  It is the entry basic
     block split after the last label.  */
  call_bb = split_point->entry_bb;
  for (gimple_stmt_iterator gsi = gsi_start_bb (call_bb); !gsi_end_p (gsi);)
    if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
      {
	last_stmt = gsi_stmt (gsi);
	gsi_next (&gsi);
      }
    else
      break;
  e = split_block (split_point->entry_bb, last_stmt);
  remove_edge (e);
  /* Produce the call statement.  */
  gimple_stmt_iterator gsi = gsi_last_bb (call_bb);
  FOR_EACH_VEC_ELT (args_to_pass, i, arg)
    if (!is_gimple_val (arg))
      {
	arg = force_gimple_operand_gsi (&gsi, arg, true, NULL_TREE,
					false, GSI_CONTINUE_LINKING);
	args_to_pass[i] = arg;
      }
  call = gimple_build_call_vec (node->decl, args_to_pass);
  gimple_call_set_with_bounds (call, with_bounds);
  gimple_set_block (call, DECL_INITIAL (current_function_decl));
  args_to_pass.release ();

  /* For optimized away parameters, add on the caller side
     before the call
     DEBUG D#X => parm_Y(D)
     stmts and associate D#X with parm in decl_debug_args_lookup
     vector to say for debug info that if parameter parm had been passed,
     it would have value parm_Y(D).  */
  if (args_to_skip)
    {
      vec<tree, va_gc> **debug_args = NULL;
      unsigned i = 0, len = 0;
      if (MAY_HAVE_DEBUG_STMTS)
	{
	  debug_args = decl_debug_args_lookup (node->decl);
	  if (debug_args)
	    len = vec_safe_length (*debug_args);
	}
      for (parm = DECL_ARGUMENTS (current_function_decl), num = 0;
	   parm; parm = DECL_CHAIN (parm), num++)
	if (bitmap_bit_p (args_to_skip, num) && is_gimple_reg (parm))
	  {
	    tree ddecl;
	    gimple *def_temp;

	    /* This needs to be done even without MAY_HAVE_DEBUG_STMTS,
	       otherwise if it didn't exist before, we'd end up with
	       different SSA_NAME_VERSIONs between -g and -g0.  */
	    arg = get_or_create_ssa_default_def (cfun, parm);
	    if (!MAY_HAVE_DEBUG_STMTS || debug_args == NULL)
	      continue;

	    while (i < len && (**debug_args)[i] != DECL_ORIGIN (parm))
	      i += 2;
	    if (i >= len)
	      continue;
	    ddecl = (**debug_args)[i + 1];
	    def_temp
	      = gimple_build_debug_bind (ddecl, unshare_expr (arg), call);
	    gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
	  }
    }

  /* We avoid address being taken on any variable used by the split part,
     so return slot optimization is always possible.  Moreover this is
     required to make DECL_BY_REFERENCE work.  */
  if (aggregate_value_p (DECL_RESULT (current_function_decl),
			 TREE_TYPE (current_function_decl))
      && (!is_gimple_reg_type (TREE_TYPE (DECL_RESULT (current_function_decl)))
	  || DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))))
    gimple_call_set_return_slot_opt (call, true);

  if (add_tsan_func_exit)
    tsan_func_exit_call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
  /* Update the return value.  This is a bit tricky.  When we do not return,
     do nothing.  When we return, we might need to update return_bb
     or produce a new return statement.  */
  if (!split_part_return_p)
    {
      gsi_insert_after (&gsi, call, GSI_NEW_STMT);
      if (tsan_func_exit_call)
	gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
    }
  else
    {
      e = make_edge (call_bb, return_bb,
		     return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
		     ? 0 : EDGE_FALLTHRU);
      e->count = call_bb->count;
      e->probability = REG_BR_PROB_BASE;

      /* If there is a return basic block, see what value we need to store
	 the return value into and put the call just before it.  */
      if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  real_retval = retval;
	  if (real_retval && split_point->split_part_set_retval)
	    {
	      gphi_iterator psi;

	      /* See if we need a new SSA_NAME for the result.
		 When DECL_BY_REFERENCE is true, retval is actually a pointer
		 to the return value and it is constant in the whole
		 function.  */
	      if (TREE_CODE (retval) == SSA_NAME
		  && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
		{
		  retval = copy_ssa_name (retval, call);

		  /* See if there is a PHI defining the return value.  */
		  for (psi = gsi_start_phis (return_bb);
		       !gsi_end_p (psi); gsi_next (&psi))
		    if (!virtual_operand_p (gimple_phi_result (psi.phi ())))
		      break;

		  /* When there is a PHI, just update its value.  */
		  if (TREE_CODE (retval) == SSA_NAME
		      && !gsi_end_p (psi))
		    add_phi_arg (psi.phi (), retval, e, UNKNOWN_LOCATION);
		  /* Otherwise update the return BB itself.
		     find_return_bb allows at most one assignment to the
		     return value, so update the first statement.  */
		  else
		    {
		      gimple_stmt_iterator bsi;
		      for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
			   gsi_next (&bsi))
			if (greturn *return_stmt
			      = dyn_cast <greturn *> (gsi_stmt (bsi)))
			  {
			    gimple_return_set_retval (return_stmt, retval);
			    break;
			  }
			else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
				 && !gimple_clobber_p (gsi_stmt (bsi)))
			  {
			    gimple_assign_set_rhs1 (gsi_stmt (bsi), retval);
			    break;
			  }
		      update_stmt (gsi_stmt (bsi));
		      /* Also adjust clobbers and debug stmts in return_bb.  */
		      for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
			   gsi_next (&bsi))
			{
			  gimple *stmt = gsi_stmt (bsi);
			  if (gimple_clobber_p (stmt)
			      || is_gimple_debug (stmt))
			    {
			      ssa_op_iter iter;
			      use_operand_p use_p;
			      bool update = false;
			      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
							SSA_OP_USE)
				if (USE_FROM_PTR (use_p) == real_retval)
				  {
				    SET_USE (use_p, retval);
				    update = true;
				  }
			      if (update)
				update_stmt (stmt);
			    }
			}
		    }

		  /* Replace retbnd with a new one.  */
		  if (retbnd)
		    {
		      gimple_stmt_iterator bsi;
		      for (bsi = gsi_last_bb (return_bb); !gsi_end_p (bsi);
			   gsi_prev (&bsi))
			if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
			  {
			    retbnd = copy_ssa_name (retbnd, call);
			    gimple_return_set_retbnd (gsi_stmt (bsi), retbnd);
			    update_stmt (gsi_stmt (bsi));
			    break;
			  }
		    }
		}
	      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
		{
		  gimple_call_set_lhs (call, build_simple_mem_ref (retval));
		  gsi_insert_after (&gsi, call, GSI_NEW_STMT);
		}
	      else
		{
		  tree restype;
		  restype = TREE_TYPE (DECL_RESULT (current_function_decl));
		  gsi_insert_after (&gsi, call, GSI_NEW_STMT);
		  if (!useless_type_conversion_p (TREE_TYPE (retval), restype))
		    {
		      gimple *cpy;
		      tree tem = create_tmp_reg (restype);
		      tem = make_ssa_name (tem, call);
		      cpy = gimple_build_assign (retval, NOP_EXPR, tem);
		      gsi_insert_after (&gsi, cpy, GSI_NEW_STMT);
		      retval = tem;
		    }
		  /* Build bndret call to obtain returned bounds.  */
		  if (retbnd)
		    chkp_insert_retbnd_call (retbnd, retval, &gsi);
		  gimple_call_set_lhs (call, retval);
		  update_stmt (call);
		}
	    }
	  else
	    gsi_insert_after (&gsi, call, GSI_NEW_STMT);
	  if (tsan_func_exit_call)
	    gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
	}
      /* We don't use the return block (there is either no return in the
	 function or multiple of them).  So create a new basic block with
	 the return statement.  */
      else
	{
	  greturn *ret;
	  if (split_point->split_part_set_retval
	      && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
	    {
	      retval = DECL_RESULT (current_function_decl);

	      if (chkp_function_instrumented_p (current_function_decl)
		  && BOUNDED_P (retval))
		retbnd = create_tmp_reg (pointer_bounds_type_node);

	      /* We use a temporary register to hold the value when
		 aggregate_value_p is false.  Similarly for DECL_BY_REFERENCE
		 we must avoid an extra copy.  */
	      if (!aggregate_value_p (retval, TREE_TYPE (current_function_decl))
		  && !DECL_BY_REFERENCE (retval))
		retval = create_tmp_reg (TREE_TYPE (retval));
	      if (is_gimple_reg (retval))
		{
		  /* When returning by reference, there is only one SSA name
		     assigned to RESULT_DECL (that is the pointer to the
		     return value).  Look it up or create a new one if it is
		     missing.  */
		  if (DECL_BY_REFERENCE (retval))
		    retval = get_or_create_ssa_default_def (cfun, retval);
		  /* Otherwise produce a new SSA name for the return value.  */
		  else
		    retval = make_ssa_name (retval, call);
		}
	      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
		gimple_call_set_lhs (call, build_simple_mem_ref (retval));
	      else
		gimple_call_set_lhs (call, retval);
	      gsi_insert_after (&gsi, call, GSI_NEW_STMT);
	    }
	  else
	    {
	      gsi_insert_after (&gsi, call, GSI_NEW_STMT);
	      if (retval
		  && is_gimple_reg_type (TREE_TYPE (retval))
		  && !is_gimple_val (retval))
		{
		  gassign *g
		    = gimple_build_assign (make_ssa_name (TREE_TYPE (retval)),
					   retval);
		  retval = gimple_assign_lhs (g);
		  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
		}
	    }
	  /* Build bndret call to obtain returned bounds.  */
	  if (retbnd)
	    chkp_insert_retbnd_call (retbnd, retval, &gsi);
	  if (tsan_func_exit_call)
	    gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
	  ret = gimple_build_return (retval);
	  gsi_insert_after (&gsi, ret, GSI_NEW_STMT);
	}
    }
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  compute_inline_parameters (node, true);
}
/* Execute function splitting pass.  */

static unsigned int
execute_split_functions (void)
{
  gimple_stmt_iterator bsi;
  basic_block bb;
  int overall_time = 0, overall_size = 0;
  int todo = 0;
  struct cgraph_node *node = cgraph_node::get (current_function_decl);

  if (flags_from_decl_or_type (current_function_decl)
      & (ECF_NORETURN|ECF_MALLOC))
    {
      if (dump_file)
	fprintf (dump_file, "Not splitting: noreturn/malloc function.\n");
      return 0;
    }
  if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
    {
      if (dump_file)
	fprintf (dump_file, "Not splitting: main function.\n");
      return 0;
    }
  /* This can be relaxed; the function might become inlinable after splitting
     away the uninlinable part.  */
  if (inline_edge_summary_vec.exists ()
      && !inline_summaries->get (node)->inlinable)
    {
      if (dump_file)
	fprintf (dump_file, "Not splitting: not inlinable.\n");
      return 0;
    }
  if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
    {
      if (dump_file)
	fprintf (dump_file, "Not splitting: disregarding inline limits.\n");
      return 0;
    }
  /* This can be relaxed; most of the versioning tests actually prevent
     a duplication.  */
  if (!tree_versionable_function_p (current_function_decl))
    {
      if (dump_file)
	fprintf (dump_file, "Not splitting: not versionable.\n");
      return 0;
    }
  /* FIXME: we could support this.  */
  if (DECL_STRUCT_FUNCTION (current_function_decl)->static_chain_decl)
    {
      if (dump_file)
	fprintf (dump_file, "Not splitting: nested function.\n");
      return 0;
    }
  /* See if it makes sense to try to split.
     It makes sense to split if we inline, that is if we have direct calls to
     handle or direct calls are possibly going to appear as a result of
     indirect inlining or LTO.  Also handle -fprofile-generate as LTO to allow
     non-LTO training for an LTO -fprofile-use build.

     Note that we are not completely conservative about disqualifying
     functions called once.  It is possible that the caller is called more
     than once and then inlining would still benefit.  */
  if ((!node->callers
       /* Local functions called once will be completely inlined most of
	  the time.  */
       || (!node->callers->next_caller && node->local.local))
      && !node->address_taken
      && !node->has_aliases_p ()
      && (!flag_lto || !node->externally_visible))
    {
      if (dump_file)
	fprintf (dump_file, "Not splitting: not called directly "
		 "or called once.\n");
      return 0;
    }

  /* FIXME: We can actually split if splitting reduces call overhead.  */
  if (!flag_inline_small_functions
      && !DECL_DECLARED_INLINE_P (current_function_decl))
    {
      if (dump_file)
	fprintf (dump_file, "Not splitting: not autoinlining and function"
		 " is not inline.\n");
      return 0;
    }
  /* We enforce splitting after loop headers when profile info is not
     available.  */
  if (profile_status_for_fn (cfun) != PROFILE_READ)
    mark_dfs_back_edges ();

  /* Initialize bitmap to track forbidden calls.  */
  forbidden_dominators = BITMAP_ALLOC (NULL);
  calculate_dominance_info (CDI_DOMINATORS);

  /* Compute local info about basic blocks and determine function
     size/time.  */
  bb_info_vec.safe_grow_cleared (last_basic_block_for_fn (cfun) + 1);
  memset (&best_split_point, 0, sizeof (best_split_point));
  basic_block return_bb = find_return_bb ();
  int tsan_exit_found = -1;
  FOR_EACH_BB_FN (bb, cfun)
    {
      int time = 0;
      int size = 0;
      int freq = compute_call_stmt_bb_frequency (current_function_decl, bb);

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Basic block %i\n", bb->index);

      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  int this_time, this_size;
	  gimple *stmt = gsi_stmt (bsi);

	  this_size = estimate_num_insns (stmt, &eni_size_weights);
	  this_time = estimate_num_insns (stmt, &eni_time_weights) * freq;
	  size += this_size;
	  time += this_time;
	  check_forbidden_calls (stmt);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  freq:%6i size:%3i time:%3i ",
		       freq, this_size, this_time);
	      print_gimple_stmt (dump_file, stmt, 0, 0);
	    }

	  if ((flag_sanitize & SANITIZE_THREAD)
	      && gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
	    {
	      /* We handle TSAN_FUNC_EXIT for splitting either in the
		 return_bb, or in its immediate predecessors.  */
	      if ((bb != return_bb && !find_edge (bb, return_bb))
		  || (tsan_exit_found != -1
		      && tsan_exit_found != (bb != return_bb)))
		{
		  if (dump_file)
		    fprintf (dump_file, "Not splitting: TSAN_FUNC_EXIT"
			     " in unexpected basic block.\n");
		  BITMAP_FREE (forbidden_dominators);
		  bb_info_vec.release ();
		  return 0;
		}
	      tsan_exit_found = bb != return_bb;
	    }
	}
      overall_time += time;
      overall_size += size;
      bb_info_vec[bb->index].time = time;
      bb_info_vec[bb->index].size = size;
    }
  find_split_points (return_bb, overall_time, overall_size);
  if (best_split_point.split_bbs)
    {
      split_function (return_bb, &best_split_point, tsan_exit_found == 1);
      BITMAP_FREE (best_split_point.ssa_names_to_pass);
      BITMAP_FREE (best_split_point.split_bbs);
      todo = TODO_update_ssa | TODO_cleanup_cfg;
    }
  BITMAP_FREE (forbidden_dominators);
  bb_info_vec.release ();
  return todo;
}
const pass_data pass_data_split_functions =
{
  GIMPLE_PASS, /* type */
  "fnsplit", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FNSPLIT, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_functions : public gimple_opt_pass
{
public:
  pass_split_functions (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_functions, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return execute_split_functions ();
    }

}; // class pass_split_functions

bool
pass_split_functions::gate (function *)
{
  /* When doing profile feedback, we want to execute the pass after profiling
     is read.  So disable one in early optimization.  */
  return (flag_partial_inlining
	  && !profile_arc_flag && !flag_branch_probabilities);
}

gimple_opt_pass *
make_pass_split_functions (gcc::context *ctxt)
{
  return new pass_split_functions (ctxt);
}
1926 execute_feedback_split_functions (void)
1928 unsigned int retval
= execute_split_functions ();
1930 retval
|= TODO_rebuild_cgraph_edges
;
1936 const pass_data pass_data_feedback_split_functions
=
1938 GIMPLE_PASS
, /* type */
1939 "feedback_fnsplit", /* name */
1940 OPTGROUP_NONE
, /* optinfo_flags */
1941 TV_IPA_FNSPLIT
, /* tv_id */
1942 PROP_cfg
, /* properties_required */
1943 0, /* properties_provided */
1944 0, /* properties_destroyed */
1945 0, /* todo_flags_start */
1946 0, /* todo_flags_finish */
1949 class pass_feedback_split_functions
: public gimple_opt_pass
1952 pass_feedback_split_functions (gcc::context
*ctxt
)
1953 : gimple_opt_pass (pass_data_feedback_split_functions
, ctxt
)
1956 /* opt_pass methods: */
1957 virtual bool gate (function
*);
1958 virtual unsigned int execute (function
*)
1960 return execute_feedback_split_functions ();
1963 }; // class pass_feedback_split_functions
1966 pass_feedback_split_functions::gate (function
*)
1968 /* We don't need to split when profiling at all, we are producing
1969 lousy code anyway. */
1970 return (flag_partial_inlining
1971 && flag_branch_probabilities
);
1977 make_pass_feedback_split_functions (gcc::context
*ctxt
)
1979 return new pass_feedback_split_functions (ctxt
);