/* Function splitting pass
   Copyright (C) 2010-2016 Free Software Foundation, Inc.
   Contributed by Jan Hubicka  <jh@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* The purpose of this pass is to split function bodies to improve
   inlining.  I.e. for a function of the form sketched in the comment
   below, where a cheap test guards a small fast path and a large slow
   path, we produce a small header function that calls an outlined
   "func.part" containing the large path.

   When func becomes inlinable and when cheap_test is often true, inlining
   func but not func.part leads to a performance improvement similar to
   inlining the original func, while the code size growth is smaller.

   The pass is organized in three stages:
   1) Collect local info about basic blocks into the BB_INFO structure and
      compute the function body's estimated size and time.
   2) Via a DFS walk find all possible basic blocks where we can split
      and choose the best one.
   3) If a split point is found, split at the specified BB by creating a
      clone and updating the function to call it.

   The decisions about what functions to split are made in
   execute_split_functions and consider_split.

   There are several possible future improvements for this pass, including:

   1) Splitting to break up large functions.
   2) Splitting to reduce stack frame usage.
   3) Allow the split part of the function to use values computed in the
      header part.  The values need to be passed to the split function,
      perhaps via the same interface as for nested functions or as
      arguments.
   4) Support for simple rematerialization.  I.e. when the split part uses
      a value computed in the header from a function parameter in a very
      cheap way, we can just recompute it.
   5) Support splitting of nested functions.
   6) Support non-SSA arguments.
   7) There is nothing preventing us from producing multiple parts of a
      single function when needed, or from splitting the parts
      themselves.  */
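/* Illustrative sketch only (the helper names cheap_test, something_small
   and something_big are hypothetical, following the description above):
   the pass turns

     func (...)
       {
         if (cheap_test)
           something_small;
         else
           something_big;
       }

   into an inlinable header plus an outlined part:

     func.part (...)
       {
         something_big;
       }

     func (...)
       {
         if (cheap_test)
           something_small;
         else
           func.part (...);
       }

   Only the small header then needs to be duplicated at the call sites that
   inline func; the big tail stays shared behind the call to func.part.  */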
#include "coretypes.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
#include "tree-chkp.h"
/* Per basic block info.  */

struct split_bb_info
{
  unsigned int size;
  unsigned int time;
};

static vec<split_bb_info> bb_info_vec;

/* Description of split point.  */

struct split_point
{
  /* Size of the partitions.  */
  unsigned int header_time, header_size, split_time, split_size;

  /* SSA names that need to be passed into the split function.  */
  bitmap ssa_names_to_pass;

  /* Basic block where we split (that will become the entry point of the
     new function).  */
  basic_block entry_bb;

  /* Basic blocks we are splitting away.  */
  bitmap split_bbs;

  /* True when the return value is computed in the split part and thus it
     needs to be returned.  */
  bool split_part_set_retval;
};

/* Best split point found.  */

struct split_point best_split_point;

/* Set of basic blocks that are not allowed to dominate a split point.  */

static bitmap forbidden_dominators;

static tree find_retval (basic_block return_bb);
static tree find_retbnd (basic_block return_bb);
/* Callback for walk_stmt_load_store_addr_ops.  If T is a non-SSA automatic
   variable, check whether it is present in the bitmap passed via DATA.  */

static bool
test_nonssa_use (gimple *, tree t, tree, void *data)
{
  t = get_base_address (t);

  if (!t || is_gimple_reg (t))
    return false;

  if (TREE_CODE (t) == PARM_DECL
      || (TREE_CODE (t) == VAR_DECL
          && auto_var_in_fn_p (t, current_function_decl))
      || TREE_CODE (t) == RESULT_DECL
      /* Normal labels are part of the CFG and will be handled gracefully.
         Forced labels however can be used directly by statements and
         need to stay in one partition along with their uses.  */
      || (TREE_CODE (t) == LABEL_DECL
          && FORCED_LABEL (t)))
    return bitmap_bit_p ((bitmap)data, DECL_UID (t));

  /* For DECL_BY_REFERENCE, the return value is actually a pointer.  We want
     to pretend that the value pointed to is the actual result decl.  */
  if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
      && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
      && SSA_NAME_VAR (TREE_OPERAND (t, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
      && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    return
      bitmap_bit_p ((bitmap)data,
                    DECL_UID (DECL_RESULT (current_function_decl)));

  return false;
}
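/* Hypothetical illustration of the DECL_BY_REFERENCE case handled in
   test_nonssa_use above: for a function returning a large aggregate,

     struct big f (void) { struct big b; ...; return b; }

   the result is passed by an invisible reference, so stores to the return
   value appear as *<result-pointer> = ...; such a MEM_REF through the
   RESULT_DECL's SSA name is counted as a use of the result decl itself.  */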
/* Dump split point CURRENT.  */

static void
dump_split_point (FILE * file, struct split_point *current)
{
  fprintf (file,
           "Split point at BB %i\n"
           "  header time: %i header size: %i\n"
           "  split time: %i split size: %i\n  bbs: ",
           current->entry_bb->index, current->header_time,
           current->header_size, current->split_time, current->split_size);
  dump_bitmap (file, current->split_bbs);
  fprintf (file, "  SSA names to pass: ");
  dump_bitmap (file, current->ssa_names_to_pass);
}
/* Look for all BBs in the header that might lead to the split part and
   verify that they are not defining any non-SSA var used by the split part.
   Parameters are the same as for consider_split.  */

static bool
verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
                     basic_block return_bb)
{
211 bitmap seen
= BITMAP_ALLOC (NULL
);
212 vec
<basic_block
> worklist
= vNULL
;
218 FOR_EACH_EDGE (e
, ei
, current
->entry_bb
->preds
)
219 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
220 && !bitmap_bit_p (current
->split_bbs
, e
->src
->index
))
222 worklist
.safe_push (e
->src
);
223 bitmap_set_bit (seen
, e
->src
->index
);
226 while (!worklist
.is_empty ())
228 bb
= worklist
.pop ();
229 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
230 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
231 && bitmap_set_bit (seen
, e
->src
->index
))
233 gcc_checking_assert (!bitmap_bit_p (current
->split_bbs
,
235 worklist
.safe_push (e
->src
);
237 for (gimple_stmt_iterator bsi
= gsi_start_bb (bb
); !gsi_end_p (bsi
);
240 gimple
*stmt
= gsi_stmt (bsi
);
241 if (is_gimple_debug (stmt
))
243 if (walk_stmt_load_store_addr_ops
244 (stmt
, non_ssa_vars
, test_nonssa_use
, test_nonssa_use
,
250 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
251 if (test_nonssa_use (stmt
, gimple_label_label (label_stmt
),
252 NULL_TREE
, non_ssa_vars
))
258 for (gphi_iterator bsi
= gsi_start_phis (bb
); !gsi_end_p (bsi
);
261 if (walk_stmt_load_store_addr_ops
262 (gsi_stmt (bsi
), non_ssa_vars
, test_nonssa_use
, test_nonssa_use
,
269 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
271 if (e
->dest
!= return_bb
)
273 for (gphi_iterator bsi
= gsi_start_phis (return_bb
);
277 gphi
*stmt
= bsi
.phi ();
278 tree op
= gimple_phi_arg_def (stmt
, e
->dest_idx
);
280 if (virtual_operand_p (gimple_phi_result (stmt
)))
282 if (TREE_CODE (op
) != SSA_NAME
283 && test_nonssa_use (stmt
, op
, op
, non_ssa_vars
))
  /* Verify that the rest of the function does not define any label
     used by the split part.  */
294 FOR_EACH_BB_FN (bb
, cfun
)
295 if (!bitmap_bit_p (current
->split_bbs
, bb
->index
)
296 && !bitmap_bit_p (seen
, bb
->index
))
298 gimple_stmt_iterator bsi
;
299 for (bsi
= gsi_start_bb (bb
); !gsi_end_p (bsi
); gsi_next (&bsi
))
300 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (bsi
)))
302 if (test_nonssa_use (label_stmt
,
303 gimple_label_label (label_stmt
),
304 NULL_TREE
, non_ssa_vars
))
/* If STMT is a call, check the callee against a list of forbidden
   predicate functions.  If a match is found, look for uses of the
   call result in condition statements that compare against zero.
   For each such use, find the block targeted by the condition
   statement for the nonzero result, and set the bit for this block
   in the forbidden dominators bitmap.  The purpose of this is to avoid
   selecting a split point where we are likely to lose the chance
   to optimize away an unused function call.  */
static void
check_forbidden_calls (gimple *stmt)
{
  imm_use_iterator use_iter;
  use_operand_p use_p;
  tree lhs;

  /* At the moment, __builtin_constant_p is the only forbidden
     predicate function call (see PR49642).  */
  if (!gimple_call_builtin_p (stmt, BUILT_IN_CONSTANT_P))
    return;

  lhs = gimple_call_lhs (stmt);

  if (!lhs || TREE_CODE (lhs) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_FAST (use_p, use_iter, lhs)
    {
      basic_block use_bb, forbidden_bb;
      enum tree_code code;
      edge true_edge, false_edge;
      gcond *use_stmt;
      tree op1;

      use_stmt = dyn_cast <gcond *> (USE_STMT (use_p));
      if (!use_stmt)
        continue;

      /* Assuming canonical form for GIMPLE_COND here, with constant
         in second position.  */
      op1 = gimple_cond_rhs (use_stmt);
      code = gimple_cond_code (use_stmt);
      use_bb = gimple_bb (use_stmt);

      extract_true_false_edges_from_block (use_bb, &true_edge, &false_edge);

      /* We're only interested in comparisons that distinguish
         unambiguously from zero.  */
      if (!integer_zerop (op1) || code == LE_EXPR || code == GE_EXPR)
        continue;

      if (code == EQ_EXPR)
        forbidden_bb = false_edge->dest;
      else
        forbidden_bb = true_edge->dest;

      bitmap_set_bit (forbidden_dominators, forbidden_bb->index);
    }
}
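/* A hypothetical example of the kind of code this protects (see PR49642):

     if (__builtin_constant_p (x))
       n = constant_path (x);
     else
       n = generic_path (x);

   The block reached on the nonzero result of __builtin_constant_p is
   recorded in forbidden_dominators so that no split point dominated by it
   is chosen, keeping the opportunity to optimize away the call that the
   predicate was meant to make unnecessary.  The names constant_path and
   generic_path above are illustrative only.  */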
/* If BB is dominated by any block in the forbidden dominators set,
   return TRUE; else FALSE.  */

static bool
dominated_by_forbidden (basic_block bb)
{
  unsigned dom_bb;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (forbidden_dominators, 1, dom_bb, bi)
    {
      if (dominated_by_p (CDI_DOMINATORS, bb,
                          BASIC_BLOCK_FOR_FN (cfun, dom_bb)))
        return true;
    }

  return false;
}
/* For the given split point CURRENT and return block RETURN_BB return 1
   if SSA name VAL is set by the split part and 0 otherwise.  */

static bool
split_part_set_ssa_name_p (tree val, struct split_point *current,
                           basic_block return_bb)
{
  if (TREE_CODE (val) != SSA_NAME)
    return false;

  return (!SSA_NAME_IS_DEFAULT_DEF (val)
          && (bitmap_bit_p (current->split_bbs,
                            gimple_bb (SSA_NAME_DEF_STMT (val))->index)
              || gimple_bb (SSA_NAME_DEF_STMT (val)) == return_bb));
}
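/* Purely illustrative example (hypothetical SSA numbering): if the split
   part contains _7 = bar (x_2(D)) and the return block returns _7, then
   split_part_set_ssa_name_p returns true for _7, so the split part must
   produce the return value.  A default definition such as x_2(D) is set by
   neither part and yields false.  */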
/* We found a split_point CURRENT.  NON_SSA_VARS is a bitmap of all non-SSA
   variables used and RETURN_BB is the return basic block.
   See if we can split the function here.  */
419 consider_split (struct split_point
*current
, bitmap non_ssa_vars
,
420 basic_block return_bb
)
423 unsigned int num_args
= 0;
424 unsigned int call_overhead
;
429 int incoming_freq
= 0;
432 bool back_edge
= false;
434 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
435 dump_split_point (dump_file
, current
);
437 FOR_EACH_EDGE (e
, ei
, current
->entry_bb
->preds
)
439 if (e
->flags
& EDGE_DFS_BACK
)
441 if (!bitmap_bit_p (current
->split_bbs
, e
->src
->index
))
442 incoming_freq
+= EDGE_FREQUENCY (e
);
  /* Do not split when we would end up calling the function anyway.  */
447 >= (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->frequency
448 * PARAM_VALUE (PARAM_PARTIAL_INLINING_ENTRY_PROBABILITY
) / 100))
      /* When the profile is guessed, we can not expect it to give us a
         realistic estimate of the likelihood of the function taking the
         complex path.  As a special case, when the tail of the function is
         a loop, enable splitting since inlining code skipping the loop
         is a likely noticeable win.  */
456 && profile_status_for_fn (cfun
) != PROFILE_READ
457 && incoming_freq
< ENTRY_BLOCK_PTR_FOR_FN (cfun
)->frequency
)
459 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
461 " Split before loop, accepting despite low frequencies %i %i.\n",
463 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->frequency
);
467 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
469 " Refused: incoming frequency is too large.\n");
474 if (!current
->header_size
)
476 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
477 fprintf (dump_file
, " Refused: header empty\n");
  /* Verify that PHI args on entry are either virtual or all their operands
     incoming from the header are the same.  */
483 for (bsi
= gsi_start_phis (current
->entry_bb
); !gsi_end_p (bsi
); gsi_next (&bsi
))
485 gphi
*stmt
= bsi
.phi ();
488 if (virtual_operand_p (gimple_phi_result (stmt
)))
490 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
492 edge e
= gimple_phi_arg_edge (stmt
, i
);
493 if (!bitmap_bit_p (current
->split_bbs
, e
->src
->index
))
495 tree edge_val
= gimple_phi_arg_def (stmt
, i
);
496 if (val
&& edge_val
!= val
)
498 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
500 " Refused: entry BB has PHI with multiple variants\n");
  /* See what arguments we will pass to the split function and compute
     call overhead.  */
511 call_overhead
= eni_size_weights
.call_cost
;
512 for (parm
= DECL_ARGUMENTS (current_function_decl
); parm
;
513 parm
= DECL_CHAIN (parm
))
515 if (!is_gimple_reg (parm
))
517 if (bitmap_bit_p (non_ssa_vars
, DECL_UID (parm
)))
519 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
521 " Refused: need to pass non-ssa param values\n");
527 tree ddef
= ssa_default_def (cfun
, parm
);
529 && bitmap_bit_p (current
->ssa_names_to_pass
,
530 SSA_NAME_VERSION (ddef
)))
532 if (!VOID_TYPE_P (TREE_TYPE (parm
)))
533 call_overhead
+= estimate_move_cost (TREE_TYPE (parm
), false);
538 if (!VOID_TYPE_P (TREE_TYPE (current_function_decl
)))
539 call_overhead
+= estimate_move_cost (TREE_TYPE (current_function_decl
),
542 if (current
->split_size
<= call_overhead
)
544 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
546 " Refused: split size is smaller than call overhead\n");
549 if (current
->header_size
+ call_overhead
550 >= (unsigned int)(DECL_DECLARED_INLINE_P (current_function_decl
)
551 ? MAX_INLINE_INSNS_SINGLE
552 : MAX_INLINE_INSNS_AUTO
))
554 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
556 " Refused: header size is too large for inline candidate\n");
  /* Splitting functions brings the target out of the comdat group; this will
     lead to code duplication if the function is reused by another unit.
     Limit this duplication.  This is consistent with the limit in tree-sra.c.
     FIXME: with LTO we ought to be able to do better!  */
564 if (DECL_ONE_ONLY (current_function_decl
)
565 && current
->split_size
>= (unsigned int) MAX_INLINE_INSNS_AUTO
)
567 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
569 " Refused: function is COMDAT and tail is too large\n");
  /* For comdat functions also reject very small tails; those will likely get
     inlined back and we do not want to risk the duplication overhead.
     FIXME: with LTO we ought to be able to do better!  */
575 if (DECL_ONE_ONLY (current_function_decl
)
576 && current
->split_size
577 <= (unsigned int) PARAM_VALUE (PARAM_EARLY_INLINING_INSNS
) / 2)
579 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
581 " Refused: function is COMDAT and tail is too small\n");
  /* FIXME: we currently can pass only SSA function parameters to the split
     arguments.  Once parm_adjustment infrastructure is supported by cloning,
     we can pass more than that.  */
588 if (num_args
!= bitmap_count_bits (current
->ssa_names_to_pass
))
591 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
593 " Refused: need to pass non-param values\n");
  /* When there are non-SSA vars used in the split region, see if they
     are used in the header region.  If so, reject the split.
     FIXME: we can use nested function support to access both.  */
600 if (!bitmap_empty_p (non_ssa_vars
)
601 && !verify_non_ssa_vars (current
, non_ssa_vars
, return_bb
))
603 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
605 " Refused: split part has non-ssa uses\n");
  /* If the split point is dominated by a forbidden block, reject
     the split.  */
611 if (!bitmap_empty_p (forbidden_dominators
)
612 && dominated_by_forbidden (current
->entry_bb
))
614 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
616 " Refused: split point dominated by forbidden block\n");
  /* See if the retval used by the return bb is computed by the header or by
     the split part.  When it is computed by the split part, we need to
     produce a return statement in the split part and add code to the header
     to pass it around.

     This is a bit tricky to test:
     1) When there is no return_bb or no return value, we always pass the
        value around.
     2) Invariants are always computed by the caller.
     3) For SSA we need to look if the defining statement is in the header
        or in the split part.
     4) For non-SSA we need to look where the var is computed.  */
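  /* Purely illustrative example (hypothetical names) of cases 3) and 4):
     if the split part computes ret_5 = expensive (x_2(D)) and the return
     block returns ret_5, the split part sets the return value (case 3), so
     the outlined clone must return it and the header just forwards it.  If
     instead the return block returns a non-SSA local that is only assigned
     in the header, the header keeps producing the value (case 4).  */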
630 retval
= find_retval (return_bb
);
632 current
->split_part_set_retval
= true;
633 else if (is_gimple_min_invariant (retval
))
634 current
->split_part_set_retval
= false;
  /* A special case is a value returned by reference, which we record as if
     it was a non-SSA set to the result_decl.  */
637 else if (TREE_CODE (retval
) == SSA_NAME
638 && SSA_NAME_VAR (retval
)
639 && TREE_CODE (SSA_NAME_VAR (retval
)) == RESULT_DECL
640 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
)))
641 current
->split_part_set_retval
642 = bitmap_bit_p (non_ssa_vars
, DECL_UID (SSA_NAME_VAR (retval
)));
643 else if (TREE_CODE (retval
) == SSA_NAME
)
644 current
->split_part_set_retval
645 = split_part_set_ssa_name_p (retval
, current
, return_bb
);
646 else if (TREE_CODE (retval
) == PARM_DECL
)
647 current
->split_part_set_retval
= false;
648 else if (TREE_CODE (retval
) == VAR_DECL
649 || TREE_CODE (retval
) == RESULT_DECL
)
650 current
->split_part_set_retval
651 = bitmap_bit_p (non_ssa_vars
, DECL_UID (retval
));
653 current
->split_part_set_retval
= true;
  /* See if the retbnd used by the return bb is computed by the header or by
     the split part.  */
656 retbnd
= find_retbnd (return_bb
);
659 bool split_part_set_retbnd
660 = split_part_set_ssa_name_p (retbnd
, current
, return_bb
);
      /* If we have both a return value and bounds then keep their definitions
         in a single function.  We use SSA names to link returned bounds and
         value and therefore do not handle cases when the result is passed by
         reference (which should not be our case anyway since bounds are
         returned for pointers only).  */
667 if ((DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
))
668 && current
->split_part_set_retval
)
669 || split_part_set_retbnd
!= current
->split_part_set_retval
)
671 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
673 " Refused: split point splits return value and bounds\n");
  /* split_function fixes up at most one non-virtual PHI node in return_bb,
     for the return value.  If there are other PHIs, give up.  */
680 if (return_bb
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
684 for (psi
= gsi_start_phis (return_bb
); !gsi_end_p (psi
); gsi_next (&psi
))
685 if (!virtual_operand_p (gimple_phi_result (psi
.phi ()))
687 && current
->split_part_set_retval
688 && TREE_CODE (retval
) == SSA_NAME
689 && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
))
690 && SSA_NAME_DEF_STMT (retval
) == psi
.phi ()))
692 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
694 " Refused: return bb has extra PHIs\n");
699 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
700 fprintf (dump_file
, " Accepted!\n");
  /* At the moment choose the split point with the lowest frequency and that
     leaves out the smallest size of header.
     In the future we might re-consider this heuristic.  */
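  /* Worked example of the comparison below (numbers hypothetical): a
     candidate whose entry_bb frequency is 100 replaces a previous best with
     frequency 400; when both frequencies are equal, the candidate with the
     larger split_size (and hence the smaller header) wins.  */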
705 if (!best_split_point
.split_bbs
706 || best_split_point
.entry_bb
->frequency
> current
->entry_bb
->frequency
707 || (best_split_point
.entry_bb
->frequency
== current
->entry_bb
->frequency
708 && best_split_point
.split_size
< current
->split_size
))
711 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
712 fprintf (dump_file
, " New best split point!\n");
713 if (best_split_point
.ssa_names_to_pass
)
715 BITMAP_FREE (best_split_point
.ssa_names_to_pass
);
716 BITMAP_FREE (best_split_point
.split_bbs
);
718 best_split_point
= *current
;
719 best_split_point
.ssa_names_to_pass
= BITMAP_ALLOC (NULL
);
720 bitmap_copy (best_split_point
.ssa_names_to_pass
,
721 current
->ssa_names_to_pass
);
722 best_split_point
.split_bbs
= BITMAP_ALLOC (NULL
);
723 bitmap_copy (best_split_point
.split_bbs
, current
->split_bbs
);
/* Return the basic block containing the RETURN statement.  We allow basic
   blocks containing only a return statement, possibly preceded by a simple
   epilogue assignment to the return value, but return_bb can not be more
   complex than this (except for -fsanitize=thread, where we also allow a
   TSAN_FUNC_EXIT () internal call in there).
   If nothing is found, return the exit block.

   When there are multiple RETURN statements, choose the one with a return
   value, since that one is more likely shared by multiple code paths.

   The return BB is special, because for function splitting it is the only
   basic block that is duplicated in between the header and the split part
   of the function.

   TODO: We might support multiple return blocks.  */
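/* Illustrative shape (hypothetical GIMPLE) of an acceptable return_bb:

     retval = tmp_var;
     return retval;

   i.e. at most a simple epilogue store to the returned variable followed by
   the return; labels, debug statements and clobbers are ignored, and under
   -fsanitize=thread a TSAN_FUNC_EXIT () call may also appear.  */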
static basic_block
find_return_bb (void)
{
  edge e;
  basic_block return_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  gimple_stmt_iterator bsi;
  bool found_return = false;
  tree retval = NULL_TREE;

  if (!single_pred_p (EXIT_BLOCK_PTR_FOR_FN (cfun)))
    return return_bb;

  e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun));
  for (bsi = gsi_last_bb (e->src); !gsi_end_p (bsi); gsi_prev (&bsi))
    {
      gimple *stmt = gsi_stmt (bsi);
      if (gimple_code (stmt) == GIMPLE_LABEL
          || is_gimple_debug (stmt)
          || gimple_clobber_p (stmt))
        ;
      else if (gimple_code (stmt) == GIMPLE_ASSIGN
               && found_return
               && gimple_assign_single_p (stmt)
               && (auto_var_in_fn_p (gimple_assign_rhs1 (stmt),
                                     current_function_decl)
                   || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
               && retval == gimple_assign_lhs (stmt))
        ;
      else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
        {
          found_return = true;
          retval = gimple_return_retval (return_stmt);
        }
      /* For -fsanitize=thread, allow also TSAN_FUNC_EXIT () in the return
         bb.  */
      else if ((flag_sanitize & SANITIZE_THREAD)
               && is_gimple_call (stmt)
               && gimple_call_internal_p (stmt)
               && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
        ;
      else
        break;
    }
  if (gsi_end_p (bsi) && found_return)
    return_bb = e->src;

  return return_bb;
}
/* Given the return basic block RETURN_BB, see where the return value is
   really stored.  */

static tree
find_retval (basic_block return_bb)
{
  gimple_stmt_iterator bsi;
  for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
    if (greturn *return_stmt = dyn_cast <greturn *> (gsi_stmt (bsi)))
      return gimple_return_retval (return_stmt);
    else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
             && !gimple_clobber_p (gsi_stmt (bsi)))
      return gimple_assign_rhs1 (gsi_stmt (bsi));
  return NULL;
}
/* Given the return basic block RETURN_BB, see where the return bounds are
   really stored.  */

static tree
find_retbnd (basic_block return_bb)
{
  gimple_stmt_iterator bsi;
  for (bsi = gsi_last_bb (return_bb); !gsi_end_p (bsi); gsi_prev (&bsi))
    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
      return gimple_return_retbnd (gsi_stmt (bsi));
  return NULL;
}
/* Callback for walk_stmt_load_store_addr_ops.  If T is a non-SSA automatic
   variable, mark it as used in the bitmap passed via DATA.
   Return true when access to T prevents splitting the function.  */
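/* For instance (hypothetical user code), in

     int f (struct s arg) { ...; use (&arg); ... }

   ARG has its address taken, so it is not a gimple register; statements
   touching it reach this callback as uses of a PARM_DECL and splitting is
   refused, since we currently cannot pass non-SSA parameters to the split
   part.  */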
static bool
mark_nonssa_use (gimple *, tree t, tree, void *data)
{
  t = get_base_address (t);

  if (!t || is_gimple_reg (t))
    return false;

  /* At present we can't pass non-SSA arguments to the split function.
     FIXME: this can be relaxed by passing references to arguments.  */
  if (TREE_CODE (t) == PARM_DECL)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "Cannot split: use of non-ssa function parameter.\n");
      return true;
    }

  if ((TREE_CODE (t) == VAR_DECL
       && auto_var_in_fn_p (t, current_function_decl))
      || TREE_CODE (t) == RESULT_DECL
      || (TREE_CODE (t) == LABEL_DECL
          && FORCED_LABEL (t)))
    bitmap_set_bit ((bitmap)data, DECL_UID (t));

  /* For DECL_BY_REFERENCE, the return value is actually a pointer.  We want
     to pretend that the value pointed to is the actual result decl.  */
  if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
      && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
      && SSA_NAME_VAR (TREE_OPERAND (t, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
      && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    return
      bitmap_bit_p ((bitmap)data,
                    DECL_UID (DECL_RESULT (current_function_decl)));

  return false;
}
/* Compute local properties of basic block BB that we collect when looking
   for split points.  We look for SSA defs and store them in SET_SSA_NAMES,
   for SSA uses and store them in USED_SSA_NAMES and for any non-SSA
   automatic vars stored in NON_SSA_VARS.

   When BB has an edge to RETURN_BB, collect uses in RETURN_BB too.

   Return false when BB contains something that prevents it from being put
   into the split function.  */
874 visit_bb (basic_block bb
, basic_block return_bb
,
875 bitmap set_ssa_names
, bitmap used_ssa_names
,
880 bool can_split
= true;
882 for (gimple_stmt_iterator bsi
= gsi_start_bb (bb
); !gsi_end_p (bsi
);
885 gimple
*stmt
= gsi_stmt (bsi
);
890 if (is_gimple_debug (stmt
))
893 if (gimple_clobber_p (stmt
))
      /* FIXME: We can split regions containing EH.  We can not however
         split RESX, EH_DISPATCH and EH_POINTER referring to the same region
         into different partitions.  This would require tracking of
         EH regions and checking in consider_split_point if they
         are not used elsewhere.  */
901 if (gimple_code (stmt
) == GIMPLE_RESX
)
903 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
904 fprintf (dump_file
, "Cannot split: resx.\n");
907 if (gimple_code (stmt
) == GIMPLE_EH_DISPATCH
)
909 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
910 fprintf (dump_file
, "Cannot split: eh dispatch.\n");
914 /* Check builtins that prevent splitting. */
915 if (gimple_code (stmt
) == GIMPLE_CALL
916 && (decl
= gimple_call_fndecl (stmt
)) != NULL_TREE
917 && DECL_BUILT_IN (decl
)
918 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
919 switch (DECL_FUNCTION_CODE (decl
))
          /* FIXME: once we allow passing non-parm values to the split part,
             we need to be sure to handle correctly builtin_stack_save and
             builtin_stack_restore.  At the moment we are safe; there is no
             way to store the builtin_stack_save result in a non-SSA variable
             since all calls to those are compiler generated.  */
927 case BUILT_IN_APPLY_ARGS
:
928 case BUILT_IN_VA_START
:
929 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
931 "Cannot split: builtin_apply and va_start.\n");
934 case BUILT_IN_EH_POINTER
:
935 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
936 fprintf (dump_file
, "Cannot split: builtin_eh_pointer.\n");
943 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_DEF
)
944 bitmap_set_bit (set_ssa_names
, SSA_NAME_VERSION (op
));
945 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
946 bitmap_set_bit (used_ssa_names
, SSA_NAME_VERSION (op
));
947 can_split
&= !walk_stmt_load_store_addr_ops (stmt
, non_ssa_vars
,
952 for (gphi_iterator bsi
= gsi_start_phis (bb
); !gsi_end_p (bsi
);
955 gphi
*stmt
= bsi
.phi ();
958 if (virtual_operand_p (gimple_phi_result (stmt
)))
960 bitmap_set_bit (set_ssa_names
,
961 SSA_NAME_VERSION (gimple_phi_result (stmt
)));
962 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
964 tree op
= gimple_phi_arg_def (stmt
, i
);
965 if (TREE_CODE (op
) == SSA_NAME
)
966 bitmap_set_bit (used_ssa_names
, SSA_NAME_VERSION (op
));
968 can_split
&= !walk_stmt_load_store_addr_ops (stmt
, non_ssa_vars
,
973 /* Record also uses coming from PHI operand in return BB. */
974 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
975 if (e
->dest
== return_bb
)
977 for (gphi_iterator bsi
= gsi_start_phis (return_bb
);
981 gphi
*stmt
= bsi
.phi ();
982 tree op
= gimple_phi_arg_def (stmt
, e
->dest_idx
);
984 if (virtual_operand_p (gimple_phi_result (stmt
)))
986 if (TREE_CODE (op
) == SSA_NAME
)
987 bitmap_set_bit (used_ssa_names
, SSA_NAME_VERSION (op
));
989 can_split
&= !mark_nonssa_use (stmt
, op
, op
, non_ssa_vars
);
/* Stack entry for recursive DFS walk in find_split_point.  */

struct stack_entry
{
  /* Basic block we are examining.  */
  basic_block bb;

  /* SSA names set and used by the BB and all BBs reachable
     from it via DFS walk.  */
  bitmap set_ssa_names, used_ssa_names;
  bitmap non_ssa_vars;

  /* All BBs visited from this BB via DFS walk.  */
  bitmap bbs_visited;

  /* Last examined edge in DFS walk.  Since we walk an unoriented graph,
     the value is up to the sum of incoming and outgoing edges of BB.  */
  unsigned int edge_num;

  /* Stack entry index of the earliest BB reachable from the current BB
     or any BB visited later in the DFS walk.  */
  int earliest;

  /* Overall time and size of all BBs reached from this BB in DFS walk.  */
  int overall_time, overall_size;

  /* When false we can not split on this BB.  */
  bool can_split;
};
/* Find all articulations and call consider_split on them.
   OVERALL_TIME and OVERALL_SIZE are the time and size of the function.

   We perform the basic algorithm for finding an articulation in a graph
   created from the CFG by considering it to be an unoriented graph.

   The articulation is discovered via a DFS walk.  We collect the earliest
   basic block on the stack that is reachable via a backward edge.  An
   articulation is any basic block such that there is no backward edge
   bypassing it.  To reduce stack usage we maintain a heap allocated stack
   in the STACK vector.  The AUX pointer of a BB is set to the index it
   appears at in the stack, or to -1 once it is visited and popped off the
   stack.

   The algorithm finds an articulation after visiting the whole component
   reachable by it.  This makes it convenient to collect information about
   the component used by consider_split.  */
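/* Tiny illustrative example (block numbers hypothetical): in a CFG
   ENTRY -> 2 -> 3 -> 4 -> EXIT with an extra edge 2 -> 4, block 3 is
   bypassed and is not an articulation, while every path from the header to
   the tail passes through block 4, so bb 4 is an articulation and becomes a
   candidate split point handed to consider_split.  */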
1044 find_split_points (basic_block return_bb
, int overall_time
, int overall_size
)
1047 vec
<stack_entry
> stack
= vNULL
;
1049 struct split_point current
;
1051 current
.header_time
= overall_time
;
1052 current
.header_size
= overall_size
;
1053 current
.split_time
= 0;
1054 current
.split_size
= 0;
1055 current
.ssa_names_to_pass
= BITMAP_ALLOC (NULL
);
1057 first
.bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
1059 first
.overall_time
= 0;
1060 first
.overall_size
= 0;
1061 first
.earliest
= INT_MAX
;
1062 first
.set_ssa_names
= 0;
1063 first
.used_ssa_names
= 0;
1064 first
.non_ssa_vars
= 0;
1065 first
.bbs_visited
= 0;
1066 first
.can_split
= false;
1067 stack
.safe_push (first
);
1068 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->aux
= (void *)(intptr_t)-1;
1070 while (!stack
.is_empty ())
1072 stack_entry
*entry
= &stack
.last ();
      /* We are walking an acyclic graph, so edge_num counts
         succ and pred edges together.  However when considering
         articulation, we want to have processed everything reachable
         from the articulation but nothing that reaches into it.  */
1078 if (entry
->edge_num
== EDGE_COUNT (entry
->bb
->succs
)
1079 && entry
->bb
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
1081 int pos
= stack
.length ();
1082 entry
->can_split
&= visit_bb (entry
->bb
, return_bb
,
1083 entry
->set_ssa_names
,
1084 entry
->used_ssa_names
,
1085 entry
->non_ssa_vars
);
1086 if (pos
<= entry
->earliest
&& !entry
->can_split
1087 && dump_file
&& (dump_flags
& TDF_DETAILS
))
1089 "found articulation at bb %i but can not split\n",
1091 if (pos
<= entry
->earliest
&& entry
->can_split
)
1093 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1094 fprintf (dump_file
, "found articulation at bb %i\n",
1096 current
.entry_bb
= entry
->bb
;
1097 current
.ssa_names_to_pass
= BITMAP_ALLOC (NULL
);
1098 bitmap_and_compl (current
.ssa_names_to_pass
,
1099 entry
->used_ssa_names
, entry
->set_ssa_names
);
1100 current
.header_time
= overall_time
- entry
->overall_time
;
1101 current
.header_size
= overall_size
- entry
->overall_size
;
1102 current
.split_time
= entry
->overall_time
;
1103 current
.split_size
= entry
->overall_size
;
1104 current
.split_bbs
= entry
->bbs_visited
;
              consider_split (&current, entry->non_ssa_vars, return_bb);
1106 BITMAP_FREE (current
.ssa_names_to_pass
);
1109 /* Do actual DFS walk. */
1111 < (EDGE_COUNT (entry
->bb
->succs
)
1112 + EDGE_COUNT (entry
->bb
->preds
)))
1116 if (entry
->edge_num
< EDGE_COUNT (entry
->bb
->succs
))
1118 e
= EDGE_SUCC (entry
->bb
, entry
->edge_num
);
1123 e
= EDGE_PRED (entry
->bb
, entry
->edge_num
1124 - EDGE_COUNT (entry
->bb
->succs
));
1130 /* New BB to visit, push it to the stack. */
1131 if (dest
!= return_bb
&& dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
)
1134 stack_entry new_entry
;
1136 new_entry
.bb
= dest
;
1137 new_entry
.edge_num
= 0;
1138 new_entry
.overall_time
1139 = bb_info_vec
[dest
->index
].time
;
1140 new_entry
.overall_size
1141 = bb_info_vec
[dest
->index
].size
;
1142 new_entry
.earliest
= INT_MAX
;
1143 new_entry
.set_ssa_names
= BITMAP_ALLOC (NULL
);
1144 new_entry
.used_ssa_names
= BITMAP_ALLOC (NULL
);
1145 new_entry
.bbs_visited
= BITMAP_ALLOC (NULL
);
1146 new_entry
.non_ssa_vars
= BITMAP_ALLOC (NULL
);
1147 new_entry
.can_split
= true;
1148 bitmap_set_bit (new_entry
.bbs_visited
, dest
->index
);
1149 stack
.safe_push (new_entry
);
1150 dest
->aux
= (void *)(intptr_t)stack
.length ();
1152 /* Back edge found, record the earliest point. */
1153 else if ((intptr_t)dest
->aux
> 0
1154 && (intptr_t)dest
->aux
< entry
->earliest
)
1155 entry
->earliest
= (intptr_t)dest
->aux
;
      /* We are done with examining the edges.  Pop off the value from the
         stack and merge the stuff we accumulated during the walk.  */
1159 else if (entry
->bb
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
1161 stack_entry
*prev
= &stack
[stack
.length () - 2];
1163 entry
->bb
->aux
= (void *)(intptr_t)-1;
1164 prev
->can_split
&= entry
->can_split
;
1165 if (prev
->set_ssa_names
)
1167 bitmap_ior_into (prev
->set_ssa_names
, entry
->set_ssa_names
);
1168 bitmap_ior_into (prev
->used_ssa_names
, entry
->used_ssa_names
);
1169 bitmap_ior_into (prev
->bbs_visited
, entry
->bbs_visited
);
1170 bitmap_ior_into (prev
->non_ssa_vars
, entry
->non_ssa_vars
);
1172 if (prev
->earliest
> entry
->earliest
)
1173 prev
->earliest
= entry
->earliest
;
1174 prev
->overall_time
+= entry
->overall_time
;
1175 prev
->overall_size
+= entry
->overall_size
;
1176 BITMAP_FREE (entry
->set_ssa_names
);
1177 BITMAP_FREE (entry
->used_ssa_names
);
1178 BITMAP_FREE (entry
->bbs_visited
);
1179 BITMAP_FREE (entry
->non_ssa_vars
);
1185 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->aux
= NULL
;
1186 FOR_EACH_BB_FN (bb
, cfun
)
1189 BITMAP_FREE (current
.ssa_names_to_pass
);
1192 /* Split function at SPLIT_POINT. */
1195 split_function (basic_block return_bb
, struct split_point
*split_point
,
1196 bool add_tsan_func_exit
)
1198 vec
<tree
> args_to_pass
= vNULL
;
1199 bitmap args_to_skip
;
1202 cgraph_node
*node
, *cur_node
= cgraph_node::get (current_function_decl
);
1203 basic_block call_bb
;
1204 gcall
*call
, *tsan_func_exit_call
= NULL
;
1207 tree retval
= NULL
, real_retval
= NULL
, retbnd
= NULL
;
1208 bool with_bounds
= chkp_function_instrumented_p (current_function_decl
);
1209 gimple
*last_stmt
= NULL
;
1215 fprintf (dump_file
, "\n\nSplitting function at:\n");
1216 dump_split_point (dump_file
, split_point
);
1219 if (cur_node
->local
.can_change_signature
)
1220 args_to_skip
= BITMAP_ALLOC (NULL
);
1222 args_to_skip
= NULL
;
  /* Collect the parameters of the new function and the args_to_skip
     bitmap.  */
1225 for (parm
= DECL_ARGUMENTS (current_function_decl
);
1226 parm
; parm
= DECL_CHAIN (parm
), num
++)
1228 && (!is_gimple_reg (parm
)
1229 || (ddef
= ssa_default_def (cfun
, parm
)) == NULL_TREE
1230 || !bitmap_bit_p (split_point
->ssa_names_to_pass
,
1231 SSA_NAME_VERSION (ddef
))))
1232 bitmap_set_bit (args_to_skip
, num
);
1235 /* This parm might not have been used up to now, but is going to be
1236 used, hence register it. */
1237 if (is_gimple_reg (parm
))
1238 arg
= get_or_create_ssa_default_def (cfun
, parm
);
1242 if (!useless_type_conversion_p (DECL_ARG_TYPE (parm
), TREE_TYPE (arg
)))
1243 arg
= fold_convert (DECL_ARG_TYPE (parm
), arg
);
1244 args_to_pass
.safe_push (arg
);
1247 /* See if the split function or the main part will return. */
1248 bool main_part_return_p
= false;
1249 bool split_part_return_p
= false;
1250 FOR_EACH_EDGE (e
, ei
, return_bb
->preds
)
1252 if (bitmap_bit_p (split_point
->split_bbs
, e
->src
->index
))
1253 split_part_return_p
= true;
1255 main_part_return_p
= true;
  /* The main part also returns if we split on a fallthru edge
     and the split part returns.  */
1259 if (split_part_return_p
)
1260 FOR_EACH_EDGE (e
, ei
, split_point
->entry_bb
->preds
)
1262 if (! bitmap_bit_p (split_point
->split_bbs
, e
->src
->index
)
1263 && single_succ_p (e
->src
))
1265 main_part_return_p
= true;
1270 /* Add return block to what will become the split function.
1271 We do not return; no return block is needed. */
1272 if (!split_part_return_p
)
1274 /* We have no return block, so nothing is needed. */
1275 else if (return_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
  /* When we do not want to return a value, we need to construct a new
     return block with an empty return statement.
     FIXME: Once we are able to change the return type, we should change the
     function to return void instead of just outputting a function with an
     undefined return value.  For structures this affects quality of
     codegen.  */
1282 else if (!split_point
->split_part_set_retval
1283 && (retval
= find_retval (return_bb
)))
1285 bool redirected
= true;
1286 basic_block new_return_bb
= create_basic_block (NULL
, 0, return_bb
);
1287 gimple_stmt_iterator gsi
= gsi_start_bb (new_return_bb
);
1288 gsi_insert_after (&gsi
, gimple_build_return (NULL
), GSI_NEW_STMT
);
1292 FOR_EACH_EDGE (e
, ei
, return_bb
->preds
)
1293 if (bitmap_bit_p (split_point
->split_bbs
, e
->src
->index
))
1295 new_return_bb
->count
+= e
->count
;
1296 new_return_bb
->frequency
+= EDGE_FREQUENCY (e
);
1297 redirect_edge_and_branch (e
, new_return_bb
);
1302 e
= make_edge (new_return_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
1303 e
->probability
= REG_BR_PROB_BASE
;
1304 e
->count
= new_return_bb
->count
;
1305 add_bb_to_loop (new_return_bb
, current_loops
->tree_root
);
1306 bitmap_set_bit (split_point
->split_bbs
, new_return_bb
->index
);
1307 retbnd
= find_retbnd (return_bb
);
1309 /* When we pass around the value, use existing return block. */
1311 bitmap_set_bit (split_point
->split_bbs
, return_bb
->index
);
  /* If the main part doesn't return, pretend the return block wasn't
     found for all of the following.  */
1315 if (! main_part_return_p
)
1316 return_bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
);
1318 /* If RETURN_BB has virtual operand PHIs, they must be removed and the
1319 virtual operand marked for renaming as we change the CFG in a way that
1320 tree-inline is not able to compensate for.
1322 Note this can happen whether or not we have a return value. If we have
1323 a return value, then RETURN_BB may have PHIs for real operands too. */
1324 if (return_bb
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
1327 for (gphi_iterator gsi
= gsi_start_phis (return_bb
);
1330 gphi
*stmt
= gsi
.phi ();
1331 if (!virtual_operand_p (gimple_phi_result (stmt
)))
1336 mark_virtual_phi_result_for_renaming (stmt
);
1337 remove_phi_node (&gsi
, true);
1340 /* In reality we have to rename the reaching definition of the
1341 virtual operand at return_bb as we will eventually release it
1342 when we remove the code region we outlined.
1343 So we have to rename all immediate virtual uses of that region
1344 if we didn't see a PHI definition yet. */
1345 /* ??? In real reality we want to set the reaching vdef of the
1346 entry of the SESE region as the vuse of the call and the reaching
1347 vdef of the exit of the SESE region as the vdef of the call. */
1349 for (gimple_stmt_iterator gsi
= gsi_start_bb (return_bb
);
1353 gimple
*stmt
= gsi_stmt (gsi
);
1354 if (gimple_vuse (stmt
))
1356 gimple_set_vuse (stmt
, NULL_TREE
);
1359 if (gimple_vdef (stmt
))
1364 /* Now create the actual clone. */
1365 cgraph_edge::rebuild_edges ();
1366 node
= cur_node
->create_version_clone_with_body
1367 (vNULL
, NULL
, args_to_skip
, !split_part_return_p
, split_point
->split_bbs
,
1368 split_point
->entry_bb
, "part");
1370 node
->split_part
= true;
  /* Let's take a time profile for the split function.  */
1373 node
->tp_first_run
= cur_node
->tp_first_run
+ 1;
  /* For usual cloning it is enough to clear the builtin only when the
     signature changes.  For partial inlining we however can not expect the
     part of the builtin implementation to have the same semantics as the
     whole.  */
1378 if (DECL_BUILT_IN (node
->decl
))
1380 DECL_BUILT_IN_CLASS (node
->decl
) = NOT_BUILT_IN
;
1381 DECL_FUNCTION_CODE (node
->decl
) = (enum built_in_function
) 0;
  /* If the original function is instrumented then its
     part is also instrumented.  */
1387 chkp_function_mark_instrumented (node
->decl
);
1389 /* If the original function is declared inline, there is no point in issuing
1390 a warning for the non-inlinable part. */
1391 DECL_NO_INLINE_WARNING_P (node
->decl
) = 1;
1392 cur_node
->remove_callees ();
1393 cur_node
->remove_all_references ();
1394 if (!split_part_return_p
)
1395 TREE_THIS_VOLATILE (node
->decl
) = 1;
1397 dump_function_to_file (node
->decl
, dump_file
, dump_flags
);
  /* Create the basic block we place the call into.  It is the entry basic
     block split after the last label.  */
1401 call_bb
= split_point
->entry_bb
;
1402 for (gimple_stmt_iterator gsi
= gsi_start_bb (call_bb
); !gsi_end_p (gsi
);)
1403 if (gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
)
1405 last_stmt
= gsi_stmt (gsi
);
1410 e
= split_block (split_point
->entry_bb
, last_stmt
);
1413 /* Produce the call statement. */
1414 gimple_stmt_iterator gsi
= gsi_last_bb (call_bb
);
1415 FOR_EACH_VEC_ELT (args_to_pass
, i
, arg
)
1416 if (!is_gimple_val (arg
))
1418 arg
= force_gimple_operand_gsi (&gsi
, arg
, true, NULL_TREE
,
1419 false, GSI_CONTINUE_LINKING
);
1420 args_to_pass
[i
] = arg
;
1422 call
= gimple_build_call_vec (node
->decl
, args_to_pass
);
1423 gimple_call_set_with_bounds (call
, with_bounds
);
1424 gimple_set_block (call
, DECL_INITIAL (current_function_decl
));
1425 args_to_pass
.release ();
1427 /* For optimized away parameters, add on the caller side
1429 DEBUG D#X => parm_Y(D)
1430 stmts and associate D#X with parm in decl_debug_args_lookup
1431 vector to say for debug info that if parameter parm had been passed,
1432 it would have value parm_Y(D). */
1435 vec
<tree
, va_gc
> **debug_args
= NULL
;
1436 unsigned i
= 0, len
= 0;
1437 if (MAY_HAVE_DEBUG_STMTS
)
1439 debug_args
= decl_debug_args_lookup (node
->decl
);
1441 len
= vec_safe_length (*debug_args
);
1443 for (parm
= DECL_ARGUMENTS (current_function_decl
), num
= 0;
1444 parm
; parm
= DECL_CHAIN (parm
), num
++)
1445 if (bitmap_bit_p (args_to_skip
, num
) && is_gimple_reg (parm
))
1450 /* This needs to be done even without MAY_HAVE_DEBUG_STMTS,
1451 otherwise if it didn't exist before, we'd end up with
1452 different SSA_NAME_VERSIONs between -g and -g0. */
1453 arg
= get_or_create_ssa_default_def (cfun
, parm
);
1454 if (!MAY_HAVE_DEBUG_STMTS
|| debug_args
== NULL
)
1457 while (i
< len
&& (**debug_args
)[i
] != DECL_ORIGIN (parm
))
1461 ddecl
= (**debug_args
)[i
+ 1];
1463 = gimple_build_debug_bind (ddecl
, unshare_expr (arg
), call
);
1464 gsi_insert_after (&gsi
, def_temp
, GSI_NEW_STMT
);
  /* We avoid address being taken on any variable used by the split part,
     so return slot optimization is always possible.  Moreover this is
     required to make DECL_BY_REFERENCE work.  */
1471 if (aggregate_value_p (DECL_RESULT (current_function_decl
),
1472 TREE_TYPE (current_function_decl
))
1473 && (!is_gimple_reg_type (TREE_TYPE (DECL_RESULT (current_function_decl
)))
1474 || DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
))))
1475 gimple_call_set_return_slot_opt (call
, true);
1477 if (add_tsan_func_exit
)
1478 tsan_func_exit_call
= gimple_build_call_internal (IFN_TSAN_FUNC_EXIT
, 0);
  /* Update the return value.  This is a bit tricky.  When we do not
     return, do nothing.  When we return we might need to update return_bb
     or produce a new return statement.  */
1483 if (!split_part_return_p
)
1485 gsi_insert_after (&gsi
, call
, GSI_NEW_STMT
);
1486 if (tsan_func_exit_call
)
1487 gsi_insert_after (&gsi
, tsan_func_exit_call
, GSI_NEW_STMT
);
1491 e
= make_edge (call_bb
, return_bb
,
1492 return_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
1493 ? 0 : EDGE_FALLTHRU
);
1494 e
->count
= call_bb
->count
;
1495 e
->probability
= REG_BR_PROB_BASE
;
      /* If there is a return basic block, see what value we need to store
         the return value into and put the call just before it.  */
1499 if (return_bb
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
1501 real_retval
= retval
= find_retval (return_bb
);
1502 retbnd
= find_retbnd (return_bb
);
1504 if (real_retval
&& split_point
->split_part_set_retval
)
          /* See if we need a new SSA_NAME for the result.
             When DECL_BY_REFERENCE is true, retval is actually a pointer to
             the return value and it is constant in the whole function.  */
1511 if (TREE_CODE (retval
) == SSA_NAME
1512 && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
)))
1514 retval
= copy_ssa_name (retval
, call
);
1516 /* See if there is PHI defining return value. */
1517 for (psi
= gsi_start_phis (return_bb
);
1518 !gsi_end_p (psi
); gsi_next (&psi
))
1519 if (!virtual_operand_p (gimple_phi_result (psi
.phi ())))
1522 /* When there is PHI, just update its value. */
1523 if (TREE_CODE (retval
) == SSA_NAME
1524 && !gsi_end_p (psi
))
1525 add_phi_arg (psi
.phi (), retval
, e
, UNKNOWN_LOCATION
);
              /* Otherwise update the return BB itself.
                 find_return_bb allows at most one assignment to the return
                 value, so update the first statement.  */
1531 gimple_stmt_iterator bsi
;
1532 for (bsi
= gsi_start_bb (return_bb
); !gsi_end_p (bsi
);
1534 if (greturn
*return_stmt
1535 = dyn_cast
<greturn
*> (gsi_stmt (bsi
)))
1537 gimple_return_set_retval (return_stmt
, retval
);
1540 else if (gimple_code (gsi_stmt (bsi
)) == GIMPLE_ASSIGN
1541 && !gimple_clobber_p (gsi_stmt (bsi
)))
1543 gimple_assign_set_rhs1 (gsi_stmt (bsi
), retval
);
1546 update_stmt (gsi_stmt (bsi
));
1549 /* Replace retbnd with new one. */
1552 gimple_stmt_iterator bsi
;
1553 for (bsi
= gsi_last_bb (return_bb
); !gsi_end_p (bsi
);
1555 if (gimple_code (gsi_stmt (bsi
)) == GIMPLE_RETURN
)
1557 retbnd
= copy_ssa_name (retbnd
, call
);
1558 gimple_return_set_retbnd (gsi_stmt (bsi
), retbnd
);
1559 update_stmt (gsi_stmt (bsi
));
1564 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
)))
1566 gimple_call_set_lhs (call
, build_simple_mem_ref (retval
));
1567 gsi_insert_after (&gsi
, call
, GSI_NEW_STMT
);
1572 restype
= TREE_TYPE (DECL_RESULT (current_function_decl
));
1573 gsi_insert_after (&gsi
, call
, GSI_NEW_STMT
);
1574 if (!useless_type_conversion_p (TREE_TYPE (retval
), restype
))
1577 tree tem
= create_tmp_reg (restype
);
1578 tem
= make_ssa_name (tem
, call
);
1579 cpy
= gimple_build_assign (retval
, NOP_EXPR
, tem
);
1580 gsi_insert_after (&gsi
, cpy
, GSI_NEW_STMT
);
1583 /* Build bndret call to obtain returned bounds. */
1585 chkp_insert_retbnd_call (retbnd
, retval
, &gsi
);
1586 gimple_call_set_lhs (call
, retval
);
1591 gsi_insert_after (&gsi
, call
, GSI_NEW_STMT
);
1592 if (tsan_func_exit_call
)
1593 gsi_insert_after (&gsi
, tsan_func_exit_call
, GSI_NEW_STMT
);
      /* We don't use the return block (there is either no return in the
         function or multiple of them).  So create a new basic block with
         the return statement.  */
1601 if (split_point
->split_part_set_retval
1602 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))))
1604 retval
= DECL_RESULT (current_function_decl
);
1606 if (chkp_function_instrumented_p (current_function_decl
)
1607 && BOUNDED_P (retval
))
1608 retbnd
= create_tmp_reg (pointer_bounds_type_node
);
          /* We use a temporary register to hold the value when
             aggregate_value_p is false.  Similarly for DECL_BY_REFERENCE we
             must avoid an extra copy.  */
1613 if (!aggregate_value_p (retval
, TREE_TYPE (current_function_decl
))
1614 && !DECL_BY_REFERENCE (retval
))
1615 retval
= create_tmp_reg (TREE_TYPE (retval
));
1616 if (is_gimple_reg (retval
))
              /* When returning by reference, there is only one SSA name
                 assigned to RESULT_DECL (that is the pointer to the return
                 value).  Look it up or create a new one if it is missing.  */
1621 if (DECL_BY_REFERENCE (retval
))
1622 retval
= get_or_create_ssa_default_def (cfun
, retval
);
1623 /* Otherwise produce new SSA name for return value. */
1625 retval
= make_ssa_name (retval
, call
);
1627 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
)))
1628 gimple_call_set_lhs (call
, build_simple_mem_ref (retval
));
1630 gimple_call_set_lhs (call
, retval
);
1632 gsi_insert_after (&gsi
, call
, GSI_NEW_STMT
);
1633 /* Build bndret call to obtain returned bounds. */
1635 chkp_insert_retbnd_call (retbnd
, retval
, &gsi
);
1636 if (tsan_func_exit_call
)
1637 gsi_insert_after (&gsi
, tsan_func_exit_call
, GSI_NEW_STMT
);
1638 ret
= gimple_build_return (retval
);
1639 gsi_insert_after (&gsi
, ret
, GSI_NEW_STMT
);
1642 free_dominance_info (CDI_DOMINATORS
);
1643 free_dominance_info (CDI_POST_DOMINATORS
);
1644 compute_inline_parameters (node
, true);
1647 /* Execute function splitting pass. */
1650 execute_split_functions (void)
1652 gimple_stmt_iterator bsi
;
1654 int overall_time
= 0, overall_size
= 0;
1656 struct cgraph_node
*node
= cgraph_node::get (current_function_decl
);
1658 if (flags_from_decl_or_type (current_function_decl
)
1659 & (ECF_NORETURN
|ECF_MALLOC
))
1662 fprintf (dump_file
, "Not splitting: noreturn/malloc function.\n");
1665 if (MAIN_NAME_P (DECL_NAME (current_function_decl
)))
1668 fprintf (dump_file
, "Not splitting: main function.\n");
  /* This can be relaxed; the function might become inlinable after
     splitting away the uninlinable part.  */
1673 if (inline_edge_summary_vec
.exists ()
1674 && !inline_summaries
->get (node
)->inlinable
)
1677 fprintf (dump_file
, "Not splitting: not inlinable.\n");
1680 if (DECL_DISREGARD_INLINE_LIMITS (node
->decl
))
1683 fprintf (dump_file
, "Not splitting: disregarding inline limits.\n");
  /* This can be relaxed; most of the versioning tests actually prevent
     a duplication.  */
1688 if (!tree_versionable_function_p (current_function_decl
))
1691 fprintf (dump_file
, "Not splitting: not versionable.\n");
1694 /* FIXME: we could support this. */
1695 if (DECL_STRUCT_FUNCTION (current_function_decl
)->static_chain_decl
)
1698 fprintf (dump_file
, "Not splitting: nested function.\n");
  /* See if it makes sense to try to split.
     It makes sense to split if we inline, that is if we have direct calls
     to handle or direct calls are possibly going to appear as a result of
     indirect inlining or LTO.  Also handle -fprofile-generate as LTO to
     allow non-LTO training for an LTO -fprofile-use build.

     Note that we are not completely conservative about disqualifying
     functions called once.  It is possible that the caller is called more
     than once and then inlining would still benefit.  */
       /* Local functions called once will be completely inlined most of
          the time.  */
1713 || (!node
->callers
->next_caller
&& node
->local
.local
))
1714 && !node
->address_taken
1715 && !node
->has_aliases_p ()
1716 && (!flag_lto
|| !node
->externally_visible
))
1719 fprintf (dump_file
, "Not splitting: not called directly "
1720 "or called once.\n");
1724 /* FIXME: We can actually split if splitting reduces call overhead. */
1725 if (!flag_inline_small_functions
1726 && !DECL_DECLARED_INLINE_P (current_function_decl
))
1729 fprintf (dump_file
, "Not splitting: not autoinlining and function"
1730 " is not inline.\n");
  /* We enforce splitting after loop headers when profile info is not
     available.  */
1736 if (profile_status_for_fn (cfun
) != PROFILE_READ
)
1737 mark_dfs_back_edges ();
1739 /* Initialize bitmap to track forbidden calls. */
1740 forbidden_dominators
= BITMAP_ALLOC (NULL
);
1741 calculate_dominance_info (CDI_DOMINATORS
);
1743 /* Compute local info about basic blocks and determine function size/time. */
1744 bb_info_vec
.safe_grow_cleared (last_basic_block_for_fn (cfun
) + 1);
1745 memset (&best_split_point
, 0, sizeof (best_split_point
));
1746 basic_block return_bb
= find_return_bb ();
1747 int tsan_exit_found
= -1;
1748 FOR_EACH_BB_FN (bb
, cfun
)
1752 int freq
= compute_call_stmt_bb_frequency (current_function_decl
, bb
);
1754 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1755 fprintf (dump_file
, "Basic block %i\n", bb
->index
);
1757 for (bsi
= gsi_start_bb (bb
); !gsi_end_p (bsi
); gsi_next (&bsi
))
1759 int this_time
, this_size
;
1760 gimple
*stmt
= gsi_stmt (bsi
);
1762 this_size
= estimate_num_insns (stmt
, &eni_size_weights
);
1763 this_time
= estimate_num_insns (stmt
, &eni_time_weights
) * freq
;
1766 check_forbidden_calls (stmt
);
1768 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1770 fprintf (dump_file
, " freq:%6i size:%3i time:%3i ",
1771 freq
, this_size
, this_time
);
1772 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1775 if ((flag_sanitize
& SANITIZE_THREAD
)
1776 && is_gimple_call (stmt
)
1777 && gimple_call_internal_p (stmt
)
1778 && gimple_call_internal_fn (stmt
) == IFN_TSAN_FUNC_EXIT
)
1780 /* We handle TSAN_FUNC_EXIT for splitting either in the
1781 return_bb, or in its immediate predecessors. */
1782 if ((bb
!= return_bb
&& !find_edge (bb
, return_bb
))
1783 || (tsan_exit_found
!= -1
1784 && tsan_exit_found
!= (bb
!= return_bb
)))
1787 fprintf (dump_file
, "Not splitting: TSAN_FUNC_EXIT"
1788 " in unexpected basic block.\n");
1789 BITMAP_FREE (forbidden_dominators
);
1790 bb_info_vec
.release ();
1793 tsan_exit_found
= bb
!= return_bb
;
1796 overall_time
+= time
;
1797 overall_size
+= size
;
1798 bb_info_vec
[bb
->index
].time
= time
;
1799 bb_info_vec
[bb
->index
].size
= size
;
1801 find_split_points (return_bb
, overall_time
, overall_size
);
1802 if (best_split_point
.split_bbs
)
1804 split_function (return_bb
, &best_split_point
, tsan_exit_found
== 1);
1805 BITMAP_FREE (best_split_point
.ssa_names_to_pass
);
1806 BITMAP_FREE (best_split_point
.split_bbs
);
1807 todo
= TODO_update_ssa
| TODO_cleanup_cfg
;
1809 BITMAP_FREE (forbidden_dominators
);
1810 bb_info_vec
.release ();
namespace {

const pass_data pass_data_split_functions =
{
  GIMPLE_PASS, /* type */
  "fnsplit", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FNSPLIT, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_functions : public gimple_opt_pass
{
public:
  pass_split_functions (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_functions, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return execute_split_functions ();
    }

}; // class pass_split_functions

bool
pass_split_functions::gate (function *)
{
  /* When doing profile feedback, we want to execute the pass after profiling
     is read.  So disable this one in early optimization.  */
  return (flag_partial_inlining
          && !profile_arc_flag && !flag_branch_probabilities);
}

} // anon namespace

gimple_opt_pass *
make_pass_split_functions (gcc::context *ctxt)
{
  return new pass_split_functions (ctxt);
}
/* Execute function splitting pass.  */

static unsigned int
execute_feedback_split_functions (void)
{
  unsigned int retval = execute_split_functions ();
  if (retval)
    retval |= TODO_rebuild_cgraph_edges;
  return retval;
}

namespace {

const pass_data pass_data_feedback_split_functions =
{
  GIMPLE_PASS, /* type */
  "feedback_fnsplit", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FNSPLIT, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_feedback_split_functions : public gimple_opt_pass
{
public:
  pass_feedback_split_functions (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_feedback_split_functions, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return execute_feedback_split_functions ();
    }

}; // class pass_feedback_split_functions

bool
pass_feedback_split_functions::gate (function *)
{
  /* We don't need to split when profiling at all, we are producing
     lousy code anyway.  */
  return (flag_partial_inlining
          && flag_branch_probabilities);
}

} // anon namespace

gimple_opt_pass *
make_pass_feedback_split_functions (gcc::context *ctxt)
{
  return new pass_feedback_split_functions (ctxt);
}