/* Function splitting pass
   Copyright (C) 2010-2015 Free Software Foundation, Inc.
   Contributed by Jan Hubicka  <jh@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* The purpose of this pass is to split function bodies to improve
   inlining.  I.e. for a function of the form:

   func (...)
     {
       if (cheap_test)
	 something_small
       else
	 something_big
     }

   produce a function of the form:

   func.part (...)
     {
       something_big
     }

   func (...)
     {
       if (cheap_test)
	 something_small
       else
	 func.part (...);
     }

   When func becomes inlinable and when cheap_test is often true, inlining func,
   but not func.part, leads to a performance improvement similar to inlining
   the original func while the code size growth is smaller.

   The pass is organized in three stages:
   1) Collect local info about basic blocks into the BB_INFO structure and
      compute the function body's estimated size and time.
   2) Via DFS walk find all possible basic blocks where we can split
      and choose the best one.
   3) If a split point is found, split at the specified BB by creating a clone
      and updating the function to call it.

   The decisions about which functions to split are made in
   execute_split_functions and consider_split.

   There are several possible future improvements for this pass, including:

   1) Splitting to break up large functions.
   2) Splitting to reduce stack frame usage.
   3) Allow the split part of the function to use values computed in the
      header part.  The values need to be passed to the split function,
      perhaps via the same interface as for nested functions or as arguments.
   4) Support for simple rematerialization.  I.e. when the split part uses a
      value computed in the header from a function parameter in a very cheap
      way, we can just recompute it.
   5) Support splitting of nested functions.
   6) Support non-SSA arguments.
   7) There is nothing preventing us from producing multiple parts of a single
      function when needed or splitting also the parts.  */
#include "coretypes.h"
#include "double-int.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "stringpool.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
#include "tree-chkp.h"
/* Per basic block info.  */

struct split_bb_info
{
  unsigned int size;
  unsigned int time;
};

static vec<split_bb_info> bb_info_vec;

/* Description of split point.  */

struct split_point
{
  /* Size of the partitions.  */
  unsigned int header_time, header_size, split_time, split_size;

  /* SSA names that need to be passed into the split function.  */
  bitmap ssa_names_to_pass;

  /* Basic block where we split (that will become the entry point of the new
     function).  */
  basic_block entry_bb;

  /* Basic blocks we are splitting away.  */
  bitmap split_bbs;

  /* True when the return value is computed in the split part and thus it
     needs to be returned.  */
  bool split_part_set_retval;
};

/* Best split point found.  */

struct split_point best_split_point;

/* Set of basic blocks that are not allowed to dominate a split point.  */

static bitmap forbidden_dominators;
static tree find_retval (basic_block return_bb);
static tree find_retbnd (basic_block return_bb);
/* Callback for walk_stmt_load_store_addr_ops.  If T is a non-SSA automatic
   variable, check if it is present in the bitmap passed via DATA.  */

static bool
test_nonssa_use (gimple, tree t, tree, void *data)
{
  t = get_base_address (t);

  if (!t || is_gimple_reg (t))
    return false;

  if (TREE_CODE (t) == PARM_DECL
      || (TREE_CODE (t) == VAR_DECL
	  && auto_var_in_fn_p (t, current_function_decl))
      || TREE_CODE (t) == RESULT_DECL
      /* Normal labels are part of the CFG and will be handled gracefully.
	 Forced labels however can be used directly by statements and
	 need to stay in one partition along with their uses.  */
      || (TREE_CODE (t) == LABEL_DECL
	  && FORCED_LABEL (t)))
    return bitmap_bit_p ((bitmap)data, DECL_UID (t));

  /* For DECL_BY_REFERENCE, the return value is actually a pointer.  We want
     to pretend that the value pointed to is the actual result decl.  */
  if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
      && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
      && SSA_NAME_VAR (TREE_OPERAND (t, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
      && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    return
      bitmap_bit_p ((bitmap)data,
		    DECL_UID (DECL_RESULT (current_function_decl)));

  return false;
}
/* Dump split point CURRENT.  */

static void
dump_split_point (FILE * file, struct split_point *current)
{
  fprintf (file,
	   "Split point at BB %i\n"
	   "  header time: %i header size: %i\n"
	   "  split time: %i split size: %i\n  bbs: ",
	   current->entry_bb->index, current->header_time,
	   current->header_size, current->split_time, current->split_size);
  dump_bitmap (file, current->split_bbs);
  fprintf (file, "  SSA names to pass: ");
  dump_bitmap (file, current->ssa_names_to_pass);
}
/* Look for all BBs in the header that might lead to the split part and verify
   that they are not defining any non-SSA var used by the split part.
   Parameters are the same as for consider_split.  */
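/* For illustration (a made-up example, not from any testcase): if a header
   block that can reach the split part contains

     buf[i_1] = 0;	<- "buf" is a non-SSA automatic variable

   and "buf" is also referenced inside the candidate split part (so it is
   set in NON_SSA_VARS), the check below fails and consider_split rejects
   the candidate, since the split part could not access the header's "buf"
   without passing it by reference.  */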
static bool
verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
		     basic_block return_bb)
{
  bitmap seen = BITMAP_ALLOC (NULL);
  vec<basic_block> worklist = vNULL;
  edge e;
  edge_iterator ei;
  bool ok = true;
  basic_block bb;
259 FOR_EACH_EDGE (e
, ei
, current
->entry_bb
->preds
)
260 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
261 && !bitmap_bit_p (current
->split_bbs
, e
->src
->index
))
263 worklist
.safe_push (e
->src
);
264 bitmap_set_bit (seen
, e
->src
->index
);
267 while (!worklist
.is_empty ())
269 bb
= worklist
.pop ();
270 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
271 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
272 && bitmap_set_bit (seen
, e
->src
->index
))
274 gcc_checking_assert (!bitmap_bit_p (current
->split_bbs
,
276 worklist
.safe_push (e
->src
);
278 for (gimple_stmt_iterator bsi
= gsi_start_bb (bb
); !gsi_end_p (bsi
);
281 gimple stmt
= gsi_stmt (bsi
);
282 if (is_gimple_debug (stmt
))
284 if (walk_stmt_load_store_addr_ops
285 (stmt
, non_ssa_vars
, test_nonssa_use
, test_nonssa_use
,
291 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
292 if (test_nonssa_use (stmt
, gimple_label_label (label_stmt
),
293 NULL_TREE
, non_ssa_vars
))
299 for (gphi_iterator bsi
= gsi_start_phis (bb
); !gsi_end_p (bsi
);
302 if (walk_stmt_load_store_addr_ops
303 (gsi_stmt (bsi
), non_ssa_vars
, test_nonssa_use
, test_nonssa_use
,
310 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
312 if (e
->dest
!= return_bb
)
314 for (gphi_iterator bsi
= gsi_start_phis (return_bb
);
318 gphi
*stmt
= bsi
.phi ();
319 tree op
= gimple_phi_arg_def (stmt
, e
->dest_idx
);
321 if (virtual_operand_p (gimple_phi_result (stmt
)))
323 if (TREE_CODE (op
) != SSA_NAME
324 && test_nonssa_use (stmt
, op
, op
, non_ssa_vars
))
333 /* Verify that the rest of function does not define any label
334 used by the split part. */
335 FOR_EACH_BB_FN (bb
, cfun
)
336 if (!bitmap_bit_p (current
->split_bbs
, bb
->index
)
337 && !bitmap_bit_p (seen
, bb
->index
))
339 gimple_stmt_iterator bsi
;
340 for (bsi
= gsi_start_bb (bb
); !gsi_end_p (bsi
); gsi_next (&bsi
))
341 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (bsi
)))
343 if (test_nonssa_use (label_stmt
,
344 gimple_label_label (label_stmt
),
345 NULL_TREE
, non_ssa_vars
))
/* If STMT is a call, check the callee against a list of forbidden
   predicate functions.  If a match is found, look for uses of the
   call result in condition statements that compare against zero.
   For each such use, find the block targeted by the condition
   statement for the nonzero result, and set the bit for this block
   in the forbidden dominators bitmap.  The purpose of this is to avoid
   selecting a split point where we are likely to lose the chance
   to optimize away an unused function call.  */
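/* Illustrative sketch (hypothetical GIMPLE, identifiers made up):

     t_1 = __builtin_constant_p (x_2);
     if (t_1 != 0)
       goto <bb A>;		<- target of the nonzero result
     else
       goto <bb B>;

   Block A gets its bit set in FORBIDDEN_DOMINATORS, so no split point
   dominated by A will be chosen; see the comment above for why.  */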
371 check_forbidden_calls (gimple stmt
)
373 imm_use_iterator use_iter
;
377 /* At the moment, __builtin_constant_p is the only forbidden
378 predicate function call (see PR49642). */
379 if (!gimple_call_builtin_p (stmt
, BUILT_IN_CONSTANT_P
))
382 lhs
= gimple_call_lhs (stmt
);
384 if (!lhs
|| TREE_CODE (lhs
) != SSA_NAME
)
387 FOR_EACH_IMM_USE_FAST (use_p
, use_iter
, lhs
)
390 basic_block use_bb
, forbidden_bb
;
392 edge true_edge
, false_edge
;
395 use_stmt
= dyn_cast
<gcond
*> (USE_STMT (use_p
));
399 /* Assuming canonical form for GIMPLE_COND here, with constant
400 in second position. */
401 op1
= gimple_cond_rhs (use_stmt
);
402 code
= gimple_cond_code (use_stmt
);
403 use_bb
= gimple_bb (use_stmt
);
405 extract_true_false_edges_from_block (use_bb
, &true_edge
, &false_edge
);
407 /* We're only interested in comparisons that distinguish
408 unambiguously from zero. */
409 if (!integer_zerop (op1
) || code
== LE_EXPR
|| code
== GE_EXPR
)
413 forbidden_bb
= false_edge
->dest
;
415 forbidden_bb
= true_edge
->dest
;
417 bitmap_set_bit (forbidden_dominators
, forbidden_bb
->index
);
/* If BB is dominated by any block in the forbidden dominators set,
   return TRUE; else FALSE.  */

static bool
dominated_by_forbidden (basic_block bb)
{
  unsigned dom_bb;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (forbidden_dominators, 1, dom_bb, bi)
    {
      if (dominated_by_p (CDI_DOMINATORS, bb,
			  BASIC_BLOCK_FOR_FN (cfun, dom_bb)))
	return true;
    }

  return false;
}
/* For a given split point CURRENT and return block RETURN_BB return 1
   if ssa name VAL is set by the split part and 0 otherwise.  */

static bool
split_part_set_ssa_name_p (tree val, struct split_point *current,
			   basic_block return_bb)
{
  if (TREE_CODE (val) != SSA_NAME)
    return false;

  return (!SSA_NAME_IS_DEFAULT_DEF (val)
	  && (bitmap_bit_p (current->split_bbs,
			    gimple_bb (SSA_NAME_DEF_STMT (val))->index)
	      || gimple_bb (SSA_NAME_DEF_STMT (val)) == return_bb));
}
/* We found a split_point CURRENT.  NON_SSA_VARS is a bitmap of all non-SSA
   variables used and RETURN_BB is the return basic block.
   See if we can split the function here.  */
460 consider_split (struct split_point
*current
, bitmap non_ssa_vars
,
461 basic_block return_bb
)
464 unsigned int num_args
= 0;
465 unsigned int call_overhead
;
470 int incoming_freq
= 0;
473 bool back_edge
= false;
475 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
476 dump_split_point (dump_file
, current
);
478 FOR_EACH_EDGE (e
, ei
, current
->entry_bb
->preds
)
480 if (e
->flags
& EDGE_DFS_BACK
)
482 if (!bitmap_bit_p (current
->split_bbs
, e
->src
->index
))
483 incoming_freq
+= EDGE_FREQUENCY (e
);
486 /* Do not split when we would end up calling function anyway. */
488 >= (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->frequency
489 * PARAM_VALUE (PARAM_PARTIAL_INLINING_ENTRY_PROBABILITY
) / 100))
  /* When the profile is guessed, we can not expect it to give us a
     realistic estimate of the likelihood of the function taking the
     complex path.  As a special case, when the tail of the function is
     a loop, enable splitting since inlining code skipping the loop
     is likely a noticeable win.  */
497 && profile_status_for_fn (cfun
) != PROFILE_READ
498 && incoming_freq
< ENTRY_BLOCK_PTR_FOR_FN (cfun
)->frequency
)
500 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
502 " Split before loop, accepting despite low frequencies %i %i.\n",
504 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->frequency
);
508 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
510 " Refused: incoming frequency is too large.\n");
515 if (!current
->header_size
)
517 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
518 fprintf (dump_file
, " Refused: header empty\n");
522 /* Verify that PHI args on entry are either virtual or all their operands
523 incoming from header are the same. */
524 for (bsi
= gsi_start_phis (current
->entry_bb
); !gsi_end_p (bsi
); gsi_next (&bsi
))
526 gphi
*stmt
= bsi
.phi ();
529 if (virtual_operand_p (gimple_phi_result (stmt
)))
531 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
533 edge e
= gimple_phi_arg_edge (stmt
, i
);
534 if (!bitmap_bit_p (current
->split_bbs
, e
->src
->index
))
536 tree edge_val
= gimple_phi_arg_def (stmt
, i
);
537 if (val
&& edge_val
!= val
)
539 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
541 " Refused: entry BB has PHI with multiple variants\n");
  /* See what arguments we will pass to the split function and compute
     the call overhead.  */
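  /* For example (purely illustrative): for a split part taking two integer
     arguments and returning an int, the overhead accumulated below is
     roughly eni_size_weights.call_cost plus three estimate_move_cost ()
     contributions (two arguments and the return value); a candidate whose
     split_size does not exceed that sum is rejected.  */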
552 call_overhead
= eni_size_weights
.call_cost
;
553 for (parm
= DECL_ARGUMENTS (current_function_decl
); parm
;
554 parm
= DECL_CHAIN (parm
))
556 if (!is_gimple_reg (parm
))
558 if (bitmap_bit_p (non_ssa_vars
, DECL_UID (parm
)))
560 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
562 " Refused: need to pass non-ssa param values\n");
568 tree ddef
= ssa_default_def (cfun
, parm
);
570 && bitmap_bit_p (current
->ssa_names_to_pass
,
571 SSA_NAME_VERSION (ddef
)))
573 if (!VOID_TYPE_P (TREE_TYPE (parm
)))
574 call_overhead
+= estimate_move_cost (TREE_TYPE (parm
), false);
579 if (!VOID_TYPE_P (TREE_TYPE (current_function_decl
)))
580 call_overhead
+= estimate_move_cost (TREE_TYPE (current_function_decl
),
583 if (current
->split_size
<= call_overhead
)
585 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
587 " Refused: split size is smaller than call overhead\n");
590 if (current
->header_size
+ call_overhead
591 >= (unsigned int)(DECL_DECLARED_INLINE_P (current_function_decl
)
592 ? MAX_INLINE_INSNS_SINGLE
593 : MAX_INLINE_INSNS_AUTO
))
595 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
597 " Refused: header size is too large for inline candidate\n");
601 /* Splitting functions brings the target out of comdat group; this will
602 lead to code duplication if the function is reused by other unit.
603 Limit this duplication. This is consistent with limit in tree-sra.c
604 FIXME: with LTO we ought to be able to do better! */
605 if (DECL_ONE_ONLY (current_function_decl
)
606 && current
->split_size
>= (unsigned int) MAX_INLINE_INSNS_AUTO
)
608 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
610 " Refused: function is COMDAT and tail is too large\n");
613 /* For comdat functions also reject very small tails; those will likely get
614 inlined back and we do not want to risk the duplication overhead.
615 FIXME: with LTO we ought to be able to do better! */
616 if (DECL_ONE_ONLY (current_function_decl
)
617 && current
->split_size
618 <= (unsigned int) PARAM_VALUE (PARAM_EARLY_INLINING_INSNS
) / 2)
620 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
622 " Refused: function is COMDAT and tail is too small\n");
626 /* FIXME: we currently can pass only SSA function parameters to the split
627 arguments. Once parm_adjustment infrastructure is supported by cloning,
628 we can pass more than that. */
629 if (num_args
!= bitmap_count_bits (current
->ssa_names_to_pass
))
632 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
634 " Refused: need to pass non-param values\n");
638 /* When there are non-ssa vars used in the split region, see if they
639 are used in the header region. If so, reject the split.
640 FIXME: we can use nested function support to access both. */
641 if (!bitmap_empty_p (non_ssa_vars
)
642 && !verify_non_ssa_vars (current
, non_ssa_vars
, return_bb
))
644 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
646 " Refused: split part has non-ssa uses\n");
650 /* If the split point is dominated by a forbidden block, reject
652 if (!bitmap_empty_p (forbidden_dominators
)
653 && dominated_by_forbidden (current
->entry_bb
))
655 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
657 " Refused: split point dominated by forbidden block\n");
  /* See if the retval used by the return bb is computed by the header or the
     split part.  When it is computed by the split part, we need to produce a
     return statement in the split part and add code to the header to pass it
     around.

     This is a bit tricky to test:
       1) When there is no return_bb or no return value, we always pass the
	  value directly.
       2) Invariants are always computed by the caller.
       3) For SSA we need to look if the defining statement is in the header
	  or the split part.
       4) For non-SSA we need to look where the var is computed.  */
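  /* For example (hypothetical SSA names): if return_bb ends with
     "return a_7;" and a_7 is defined in one of the blocks in
     CURRENT->split_bbs, case 3 applies and split_part_set_retval becomes
     true, so the split part returns the value and the header passes it
     along.  For "return 0;" case 2 applies and the header keeps the
     return statement to itself.  */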
671 retval
= find_retval (return_bb
);
673 current
->split_part_set_retval
= true;
674 else if (is_gimple_min_invariant (retval
))
675 current
->split_part_set_retval
= false;
  /* A special case is a value returned by reference, which we record as if
     it was a non-SSA variable set to the result_decl.  */
678 else if (TREE_CODE (retval
) == SSA_NAME
679 && SSA_NAME_VAR (retval
)
680 && TREE_CODE (SSA_NAME_VAR (retval
)) == RESULT_DECL
681 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
)))
682 current
->split_part_set_retval
683 = bitmap_bit_p (non_ssa_vars
, DECL_UID (SSA_NAME_VAR (retval
)));
684 else if (TREE_CODE (retval
) == SSA_NAME
)
685 current
->split_part_set_retval
686 = split_part_set_ssa_name_p (retval
, current
, return_bb
);
687 else if (TREE_CODE (retval
) == PARM_DECL
)
688 current
->split_part_set_retval
= false;
689 else if (TREE_CODE (retval
) == VAR_DECL
690 || TREE_CODE (retval
) == RESULT_DECL
)
691 current
->split_part_set_retval
692 = bitmap_bit_p (non_ssa_vars
, DECL_UID (retval
));
694 current
->split_part_set_retval
= true;
696 /* See if retbnd used by return bb is computed by header or split part. */
697 retbnd
= find_retbnd (return_bb
);
700 bool split_part_set_retbnd
701 = split_part_set_ssa_name_p (retbnd
, current
, return_bb
);
703 /* If we have both return value and bounds then keep their definitions
704 in a single function. We use SSA names to link returned bounds and
705 value and therefore do not handle cases when result is passed by
706 reference (which should not be our case anyway since bounds are
707 returned for pointers only). */
708 if ((DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
))
709 && current
->split_part_set_retval
)
710 || split_part_set_retbnd
!= current
->split_part_set_retval
)
712 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
714 " Refused: split point splits return value and bounds\n");
  /* split_function fixes up at most one non-virtual PHI node in return_bb,
     for the return value.  If there are other PHIs, give up.  */
721 if (return_bb
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
725 for (psi
= gsi_start_phis (return_bb
); !gsi_end_p (psi
); gsi_next (&psi
))
726 if (!virtual_operand_p (gimple_phi_result (psi
.phi ()))
728 && current
->split_part_set_retval
729 && TREE_CODE (retval
) == SSA_NAME
730 && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
))
731 && SSA_NAME_DEF_STMT (retval
) == psi
.phi ()))
733 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
735 " Refused: return bb has extra PHIs\n");
740 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
741 fprintf (dump_file
, " Accepted!\n");
  /* At the moment we choose the split point with the lowest frequency and
     that leaves the smallest header.
     In the future we might reconsider this heuristic.  */
746 if (!best_split_point
.split_bbs
747 || best_split_point
.entry_bb
->frequency
> current
->entry_bb
->frequency
748 || (best_split_point
.entry_bb
->frequency
== current
->entry_bb
->frequency
749 && best_split_point
.split_size
< current
->split_size
))
752 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
753 fprintf (dump_file
, " New best split point!\n");
754 if (best_split_point
.ssa_names_to_pass
)
756 BITMAP_FREE (best_split_point
.ssa_names_to_pass
);
757 BITMAP_FREE (best_split_point
.split_bbs
);
759 best_split_point
= *current
;
760 best_split_point
.ssa_names_to_pass
= BITMAP_ALLOC (NULL
);
761 bitmap_copy (best_split_point
.ssa_names_to_pass
,
762 current
->ssa_names_to_pass
);
763 best_split_point
.split_bbs
= BITMAP_ALLOC (NULL
);
764 bitmap_copy (best_split_point
.split_bbs
, current
->split_bbs
);
/* Return the basic block containing the RETURN statement.  We allow basic
   blocks of the form:
   <retval> = tmp_var;
   return <retval>
   but return_bb can not be more complex than this (except for
   -fsanitize=thread we allow TSAN_FUNC_EXIT () internal call in there).
   If nothing is found, return the exit block.

   When there are multiple RETURN statements, choose one with a return value,
   since that one is more likely shared by multiple code paths.

   Return BB is special, because for function splitting it is the only
   basic block that is duplicated in between the header and split part of the
   function.

   TODO: We might support multiple return blocks.  */
786 find_return_bb (void)
789 basic_block return_bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
);
790 gimple_stmt_iterator bsi
;
791 bool found_return
= false;
792 tree retval
= NULL_TREE
;
794 if (!single_pred_p (EXIT_BLOCK_PTR_FOR_FN (cfun
)))
797 e
= single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun
));
798 for (bsi
= gsi_last_bb (e
->src
); !gsi_end_p (bsi
); gsi_prev (&bsi
))
800 gimple stmt
= gsi_stmt (bsi
);
801 if (gimple_code (stmt
) == GIMPLE_LABEL
802 || is_gimple_debug (stmt
)
803 || gimple_clobber_p (stmt
))
805 else if (gimple_code (stmt
) == GIMPLE_ASSIGN
807 && gimple_assign_single_p (stmt
)
808 && (auto_var_in_fn_p (gimple_assign_rhs1 (stmt
),
809 current_function_decl
)
810 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt
)))
811 && retval
== gimple_assign_lhs (stmt
))
813 else if (greturn
*return_stmt
= dyn_cast
<greturn
*> (stmt
))
816 retval
= gimple_return_retval (return_stmt
);
	  /* For -fsanitize=thread, allow also TSAN_FUNC_EXIT () in the return
	     block.  */
820 else if ((flag_sanitize
& SANITIZE_THREAD
)
821 && is_gimple_call (stmt
)
822 && gimple_call_internal_p (stmt
)
823 && gimple_call_internal_fn (stmt
) == IFN_TSAN_FUNC_EXIT
)
828 if (gsi_end_p (bsi
) && found_return
)
/* Given return basic block RETURN_BB, see where the return value is really
   stored.  */

static tree
find_retval (basic_block return_bb)
{
  gimple_stmt_iterator bsi;
  for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
    if (greturn *return_stmt = dyn_cast <greturn *> (gsi_stmt (bsi)))
      return gimple_return_retval (return_stmt);
    else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
	     && !gimple_clobber_p (gsi_stmt (bsi)))
      return gimple_assign_rhs1 (gsi_stmt (bsi));
  return NULL;
}
/* Given return basic block RETURN_BB, see where the return bounds are really
   stored.  */

static tree
find_retbnd (basic_block return_bb)
{
  gimple_stmt_iterator bsi;
  for (bsi = gsi_last_bb (return_bb); !gsi_end_p (bsi); gsi_prev (&bsi))
    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
      return gimple_return_retbnd (gsi_stmt (bsi));
  return NULL;
}
/* Callback for walk_stmt_load_store_addr_ops.  If T is a non-SSA automatic
   variable, mark it as used in the bitmap passed via DATA.
   Return true when access to T prevents splitting the function.  */

static bool
mark_nonssa_use (gimple, tree t, tree, void *data)
{
  t = get_base_address (t);

  if (!t || is_gimple_reg (t))
    return false;
873 /* At present we can't pass non-SSA arguments to split function.
874 FIXME: this can be relaxed by passing references to arguments. */
875 if (TREE_CODE (t
) == PARM_DECL
)
877 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
879 "Cannot split: use of non-ssa function parameter.\n");
883 if ((TREE_CODE (t
) == VAR_DECL
884 && auto_var_in_fn_p (t
, current_function_decl
))
885 || TREE_CODE (t
) == RESULT_DECL
886 || (TREE_CODE (t
) == LABEL_DECL
887 && FORCED_LABEL (t
)))
888 bitmap_set_bit ((bitmap
)data
, DECL_UID (t
));
890 /* For DECL_BY_REFERENCE, the return value is actually a pointer. We want
891 to pretend that the value pointed to is actual result decl. */
892 if ((TREE_CODE (t
) == MEM_REF
|| INDIRECT_REF_P (t
))
893 && TREE_CODE (TREE_OPERAND (t
, 0)) == SSA_NAME
894 && SSA_NAME_VAR (TREE_OPERAND (t
, 0))
895 && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t
, 0))) == RESULT_DECL
896 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
)))
898 bitmap_bit_p ((bitmap
)data
,
899 DECL_UID (DECL_RESULT (current_function_decl
)));
/* Compute local properties of basic block BB that we collect when looking for
   split points.  We look for SSA defs and store them in SET_SSA_NAMES,
   for SSA uses and store them in USED_SSA_NAMES, and for any non-SSA automatic
   vars stored in NON_SSA_VARS.

   When BB has an edge to RETURN_BB, collect uses in RETURN_BB too.

   Return false when BB contains something that prevents it from being put
   into the split function.  */
915 visit_bb (basic_block bb
, basic_block return_bb
,
916 bitmap set_ssa_names
, bitmap used_ssa_names
,
921 bool can_split
= true;
923 for (gimple_stmt_iterator bsi
= gsi_start_bb (bb
); !gsi_end_p (bsi
);
926 gimple stmt
= gsi_stmt (bsi
);
931 if (is_gimple_debug (stmt
))
934 if (gimple_clobber_p (stmt
))
937 /* FIXME: We can split regions containing EH. We can not however
938 split RESX, EH_DISPATCH and EH_POINTER referring to same region
939 into different partitions. This would require tracking of
940 EH regions and checking in consider_split_point if they
941 are not used elsewhere. */
942 if (gimple_code (stmt
) == GIMPLE_RESX
)
944 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
945 fprintf (dump_file
, "Cannot split: resx.\n");
948 if (gimple_code (stmt
) == GIMPLE_EH_DISPATCH
)
950 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
951 fprintf (dump_file
, "Cannot split: eh dispatch.\n");
955 /* Check builtins that prevent splitting. */
956 if (gimple_code (stmt
) == GIMPLE_CALL
957 && (decl
= gimple_call_fndecl (stmt
)) != NULL_TREE
958 && DECL_BUILT_IN (decl
)
959 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
960 switch (DECL_FUNCTION_CODE (decl
))
	  /* FIXME: once we allow passing non-parm values to the split part,
	     we need to be sure to handle builtin_stack_save and
	     builtin_stack_restore correctly.  At the moment we are safe;
	     there is no way to store the builtin_stack_save result in a
	     non-SSA variable since all calls to those are compiler
	     generated.  */
968 case BUILT_IN_APPLY_ARGS
:
969 case BUILT_IN_VA_START
:
970 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
972 "Cannot split: builtin_apply and va_start.\n");
975 case BUILT_IN_EH_POINTER
:
976 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
977 fprintf (dump_file
, "Cannot split: builtin_eh_pointer.\n");
984 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_DEF
)
985 bitmap_set_bit (set_ssa_names
, SSA_NAME_VERSION (op
));
986 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
987 bitmap_set_bit (used_ssa_names
, SSA_NAME_VERSION (op
));
988 can_split
&= !walk_stmt_load_store_addr_ops (stmt
, non_ssa_vars
,
993 for (gphi_iterator bsi
= gsi_start_phis (bb
); !gsi_end_p (bsi
);
996 gphi
*stmt
= bsi
.phi ();
999 if (virtual_operand_p (gimple_phi_result (stmt
)))
1001 bitmap_set_bit (set_ssa_names
,
1002 SSA_NAME_VERSION (gimple_phi_result (stmt
)));
1003 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1005 tree op
= gimple_phi_arg_def (stmt
, i
);
1006 if (TREE_CODE (op
) == SSA_NAME
)
1007 bitmap_set_bit (used_ssa_names
, SSA_NAME_VERSION (op
));
1009 can_split
&= !walk_stmt_load_store_addr_ops (stmt
, non_ssa_vars
,
1014 /* Record also uses coming from PHI operand in return BB. */
1015 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1016 if (e
->dest
== return_bb
)
1018 for (gphi_iterator bsi
= gsi_start_phis (return_bb
);
1022 gphi
*stmt
= bsi
.phi ();
1023 tree op
= gimple_phi_arg_def (stmt
, e
->dest_idx
);
1025 if (virtual_operand_p (gimple_phi_result (stmt
)))
1027 if (TREE_CODE (op
) == SSA_NAME
)
1028 bitmap_set_bit (used_ssa_names
, SSA_NAME_VERSION (op
));
1030 can_split
&= !mark_nonssa_use (stmt
, op
, op
, non_ssa_vars
);
/* Stack entry for recursive DFS walk in find_split_point.  */

struct stack_entry
{
  /* Basic block we are examining.  */
  basic_block bb;

  /* SSA names set and used by the BB and all BBs reachable
     from it via DFS walk.  */
  bitmap set_ssa_names, used_ssa_names;
  bitmap non_ssa_vars;

  /* All BBs visited from this BB via DFS walk.  */
  bitmap bbs_visited;

  /* Last examined edge in DFS walk.  Since we walk an unoriented graph,
     the value is up to the sum of incoming and outgoing edges of BB.  */
  unsigned int edge_num;

  /* Stack entry index of the earliest BB reachable from the current BB
     or any BB visited later in the DFS walk.  */
  int earliest;

  /* Overall time and size of all BBs reached from this BB in DFS walk.  */
  int overall_time, overall_size;

  /* When false we can not split on this BB.  */
  bool can_split;
};
/* Find all articulations and call consider_split on them.
   OVERALL_TIME and OVERALL_SIZE are the time and size of the function.

   We perform the basic algorithm for finding an articulation in a graph
   created from the CFG by considering it to be an unoriented graph.

   The articulation is discovered via DFS walk.  We collect the earliest
   basic block on the stack that is reachable via a backward edge.  An
   articulation is any basic block such that there is no backward edge
   bypassing it.  To reduce stack usage we maintain a heap allocated stack
   in the STACK vector.  The AUX pointer of a BB is set to the index where
   it appears in the stack, or to -1 once it is visited and popped off the
   stack.

   The algorithm finds an articulation after visiting the whole component
   reachable by it.  This makes it convenient to collect information about
   the component used by consider_split.  */
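/* Illustrative sketch (a made-up CFG, not from any testcase):

     ENTRY -> A -> B -> C -> RETURN
                   ^___/		(back edge C -> B)

   Viewed as an unoriented graph, B is an articulation: no edge bypasses
   it, so once the DFS has finished the component below it, the walk calls
   consider_split with B as the candidate entry_bb and {B, C} as the split
   part.  An extra edge from A straight to C would bypass B and disqualify
   it.  */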
1085 find_split_points (basic_block return_bb
, int overall_time
, int overall_size
)
1088 vec
<stack_entry
> stack
= vNULL
;
1090 struct split_point current
;
1092 current
.header_time
= overall_time
;
1093 current
.header_size
= overall_size
;
1094 current
.split_time
= 0;
1095 current
.split_size
= 0;
1096 current
.ssa_names_to_pass
= BITMAP_ALLOC (NULL
);
1098 first
.bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
1100 first
.overall_time
= 0;
1101 first
.overall_size
= 0;
1102 first
.earliest
= INT_MAX
;
1103 first
.set_ssa_names
= 0;
1104 first
.used_ssa_names
= 0;
1105 first
.non_ssa_vars
= 0;
1106 first
.bbs_visited
= 0;
1107 first
.can_split
= false;
1108 stack
.safe_push (first
);
1109 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->aux
= (void *)(intptr_t)-1;
1111 while (!stack
.is_empty ())
1113 stack_entry
*entry
= &stack
.last ();
1115 /* We are walking an acyclic graph, so edge_num counts
1116 succ and pred edges together. However when considering
1117 articulation, we want to have processed everything reachable
1118 from articulation but nothing that reaches into it. */
1119 if (entry
->edge_num
== EDGE_COUNT (entry
->bb
->succs
)
1120 && entry
->bb
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
1122 int pos
= stack
.length ();
1123 entry
->can_split
&= visit_bb (entry
->bb
, return_bb
,
1124 entry
->set_ssa_names
,
1125 entry
->used_ssa_names
,
1126 entry
->non_ssa_vars
);
1127 if (pos
<= entry
->earliest
&& !entry
->can_split
1128 && dump_file
&& (dump_flags
& TDF_DETAILS
))
1130 "found articulation at bb %i but can not split\n",
1132 if (pos
<= entry
->earliest
&& entry
->can_split
)
1134 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1135 fprintf (dump_file
, "found articulation at bb %i\n",
1137 current
.entry_bb
= entry
->bb
;
1138 current
.ssa_names_to_pass
= BITMAP_ALLOC (NULL
);
1139 bitmap_and_compl (current
.ssa_names_to_pass
,
1140 entry
->used_ssa_names
, entry
->set_ssa_names
);
1141 current
.header_time
= overall_time
- entry
->overall_time
;
1142 current
.header_size
= overall_size
- entry
->overall_size
;
1143 current
.split_time
= entry
->overall_time
;
1144 current
.split_size
= entry
->overall_size
;
1145 current
.split_bbs
= entry
->bbs_visited
;
1146 consider_split (¤t
, entry
->non_ssa_vars
, return_bb
);
1147 BITMAP_FREE (current
.ssa_names_to_pass
);
1150 /* Do actual DFS walk. */
1152 < (EDGE_COUNT (entry
->bb
->succs
)
1153 + EDGE_COUNT (entry
->bb
->preds
)))
1157 if (entry
->edge_num
< EDGE_COUNT (entry
->bb
->succs
))
1159 e
= EDGE_SUCC (entry
->bb
, entry
->edge_num
);
1164 e
= EDGE_PRED (entry
->bb
, entry
->edge_num
1165 - EDGE_COUNT (entry
->bb
->succs
));
1171 /* New BB to visit, push it to the stack. */
1172 if (dest
!= return_bb
&& dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
)
1175 stack_entry new_entry
;
1177 new_entry
.bb
= dest
;
1178 new_entry
.edge_num
= 0;
1179 new_entry
.overall_time
1180 = bb_info_vec
[dest
->index
].time
;
1181 new_entry
.overall_size
1182 = bb_info_vec
[dest
->index
].size
;
1183 new_entry
.earliest
= INT_MAX
;
1184 new_entry
.set_ssa_names
= BITMAP_ALLOC (NULL
);
1185 new_entry
.used_ssa_names
= BITMAP_ALLOC (NULL
);
1186 new_entry
.bbs_visited
= BITMAP_ALLOC (NULL
);
1187 new_entry
.non_ssa_vars
= BITMAP_ALLOC (NULL
);
1188 new_entry
.can_split
= true;
1189 bitmap_set_bit (new_entry
.bbs_visited
, dest
->index
);
1190 stack
.safe_push (new_entry
);
1191 dest
->aux
= (void *)(intptr_t)stack
.length ();
1193 /* Back edge found, record the earliest point. */
1194 else if ((intptr_t)dest
->aux
> 0
1195 && (intptr_t)dest
->aux
< entry
->earliest
)
1196 entry
->earliest
= (intptr_t)dest
->aux
;
1198 /* We are done with examining the edges. Pop off the value from stack
1199 and merge stuff we accumulate during the walk. */
1200 else if (entry
->bb
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
1202 stack_entry
*prev
= &stack
[stack
.length () - 2];
1204 entry
->bb
->aux
= (void *)(intptr_t)-1;
1205 prev
->can_split
&= entry
->can_split
;
1206 if (prev
->set_ssa_names
)
1208 bitmap_ior_into (prev
->set_ssa_names
, entry
->set_ssa_names
);
1209 bitmap_ior_into (prev
->used_ssa_names
, entry
->used_ssa_names
);
1210 bitmap_ior_into (prev
->bbs_visited
, entry
->bbs_visited
);
1211 bitmap_ior_into (prev
->non_ssa_vars
, entry
->non_ssa_vars
);
1213 if (prev
->earliest
> entry
->earliest
)
1214 prev
->earliest
= entry
->earliest
;
1215 prev
->overall_time
+= entry
->overall_time
;
1216 prev
->overall_size
+= entry
->overall_size
;
1217 BITMAP_FREE (entry
->set_ssa_names
);
1218 BITMAP_FREE (entry
->used_ssa_names
);
1219 BITMAP_FREE (entry
->bbs_visited
);
1220 BITMAP_FREE (entry
->non_ssa_vars
);
1226 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->aux
= NULL
;
1227 FOR_EACH_BB_FN (bb
, cfun
)
1230 BITMAP_FREE (current
.ssa_names_to_pass
);
1233 /* Split function at SPLIT_POINT. */
1236 split_function (basic_block return_bb
, struct split_point
*split_point
,
1237 bool add_tsan_func_exit
)
1239 vec
<tree
> args_to_pass
= vNULL
;
1240 bitmap args_to_skip
;
1243 cgraph_node
*node
, *cur_node
= cgraph_node::get (current_function_decl
);
1244 basic_block call_bb
;
1245 gcall
*call
, *tsan_func_exit_call
= NULL
;
1248 tree retval
= NULL
, real_retval
= NULL
, retbnd
= NULL
;
1249 bool split_part_return_p
= false;
1250 bool with_bounds
= chkp_function_instrumented_p (current_function_decl
);
1251 gimple last_stmt
= NULL
;
1254 vec
<tree
, va_gc
> **debug_args
= NULL
;
1258 fprintf (dump_file
, "\n\nSplitting function at:\n");
1259 dump_split_point (dump_file
, split_point
);
1262 if (cur_node
->local
.can_change_signature
)
1263 args_to_skip
= BITMAP_ALLOC (NULL
);
1265 args_to_skip
= NULL
;
1267 /* Collect the parameters of new function and args_to_skip bitmap. */
1268 for (parm
= DECL_ARGUMENTS (current_function_decl
);
1269 parm
; parm
= DECL_CHAIN (parm
), num
++)
1271 && (!is_gimple_reg (parm
)
1272 || (ddef
= ssa_default_def (cfun
, parm
)) == NULL_TREE
1273 || !bitmap_bit_p (split_point
->ssa_names_to_pass
,
1274 SSA_NAME_VERSION (ddef
))))
1275 bitmap_set_bit (args_to_skip
, num
);
1278 /* This parm might not have been used up to now, but is going to be
1279 used, hence register it. */
1280 if (is_gimple_reg (parm
))
1281 arg
= get_or_create_ssa_default_def (cfun
, parm
);
1285 if (!useless_type_conversion_p (DECL_ARG_TYPE (parm
), TREE_TYPE (arg
)))
1286 arg
= fold_convert (DECL_ARG_TYPE (parm
), arg
);
1287 args_to_pass
.safe_push (arg
);
1290 /* See if the split function will return. */
1291 FOR_EACH_EDGE (e
, ei
, return_bb
->preds
)
1292 if (bitmap_bit_p (split_point
->split_bbs
, e
->src
->index
))
1295 split_part_return_p
= true;
1297 /* Add return block to what will become the split function.
1298 We do not return; no return block is needed. */
1299 if (!split_part_return_p
)
1301 /* We have no return block, so nothing is needed. */
1302 else if (return_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1304 /* When we do not want to return value, we need to construct
1305 new return block with empty return statement.
1306 FIXME: Once we are able to change return type, we should change function
1307 to return void instead of just outputting function with undefined return
1308 value. For structures this affects quality of codegen. */
1309 else if (!split_point
->split_part_set_retval
1310 && find_retval (return_bb
))
1312 bool redirected
= true;
1313 basic_block new_return_bb
= create_basic_block (NULL
, 0, return_bb
);
1314 gimple_stmt_iterator gsi
= gsi_start_bb (new_return_bb
);
1315 gsi_insert_after (&gsi
, gimple_build_return (NULL
), GSI_NEW_STMT
);
1319 FOR_EACH_EDGE (e
, ei
, return_bb
->preds
)
1320 if (bitmap_bit_p (split_point
->split_bbs
, e
->src
->index
))
1322 new_return_bb
->count
+= e
->count
;
1323 new_return_bb
->frequency
+= EDGE_FREQUENCY (e
);
1324 redirect_edge_and_branch (e
, new_return_bb
);
1329 e
= make_edge (new_return_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
1330 e
->probability
= REG_BR_PROB_BASE
;
1331 e
->count
= new_return_bb
->count
;
1332 add_bb_to_loop (new_return_bb
, current_loops
->tree_root
);
1333 bitmap_set_bit (split_point
->split_bbs
, new_return_bb
->index
);
1335 /* When we pass around the value, use existing return block. */
1337 bitmap_set_bit (split_point
->split_bbs
, return_bb
->index
);
1339 /* If RETURN_BB has virtual operand PHIs, they must be removed and the
1340 virtual operand marked for renaming as we change the CFG in a way that
1341 tree-inline is not able to compensate for.
1343 Note this can happen whether or not we have a return value. If we have
1344 a return value, then RETURN_BB may have PHIs for real operands too. */
1345 if (return_bb
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
1348 for (gphi_iterator gsi
= gsi_start_phis (return_bb
);
1351 gphi
*stmt
= gsi
.phi ();
1352 if (!virtual_operand_p (gimple_phi_result (stmt
)))
1357 mark_virtual_phi_result_for_renaming (stmt
);
1358 remove_phi_node (&gsi
, true);
1361 /* In reality we have to rename the reaching definition of the
1362 virtual operand at return_bb as we will eventually release it
1363 when we remove the code region we outlined.
1364 So we have to rename all immediate virtual uses of that region
1365 if we didn't see a PHI definition yet. */
1366 /* ??? In real reality we want to set the reaching vdef of the
1367 entry of the SESE region as the vuse of the call and the reaching
1368 vdef of the exit of the SESE region as the vdef of the call. */
1370 for (gimple_stmt_iterator gsi
= gsi_start_bb (return_bb
);
1374 gimple stmt
= gsi_stmt (gsi
);
1375 if (gimple_vuse (stmt
))
1377 gimple_set_vuse (stmt
, NULL_TREE
);
1380 if (gimple_vdef (stmt
))
1385 /* Now create the actual clone. */
1386 cgraph_edge::rebuild_edges ();
1387 node
= cur_node
->create_version_clone_with_body
1388 (vNULL
, NULL
, args_to_skip
, !split_part_return_p
, split_point
->split_bbs
,
1389 split_point
->entry_bb
, "part");
1391 node
->split_part
= true;
  /* Let's take a time profile for the split function.  */
1394 node
->tp_first_run
= cur_node
->tp_first_run
+ 1;
  /* For usual cloning it is enough to clear the builtin only when the
     signature changes.  For partial inlining we however can not expect the
     part of the builtin implementation to have the same semantics as the
     whole.  */
1399 if (DECL_BUILT_IN (node
->decl
))
1401 DECL_BUILT_IN_CLASS (node
->decl
) = NOT_BUILT_IN
;
1402 DECL_FUNCTION_CODE (node
->decl
) = (enum built_in_function
) 0;
1405 /* If the original function is instrumented then it's
1406 part is also instrumented. */
1408 chkp_function_mark_instrumented (node
->decl
);
1410 /* If the original function is declared inline, there is no point in issuing
1411 a warning for the non-inlinable part. */
1412 DECL_NO_INLINE_WARNING_P (node
->decl
) = 1;
1413 cur_node
->remove_callees ();
1414 cur_node
->remove_all_references ();
1415 if (!split_part_return_p
)
1416 TREE_THIS_VOLATILE (node
->decl
) = 1;
1418 dump_function_to_file (node
->decl
, dump_file
, dump_flags
);
1420 /* Create the basic block we place call into. It is the entry basic block
1421 split after last label. */
1422 call_bb
= split_point
->entry_bb
;
1423 for (gimple_stmt_iterator gsi
= gsi_start_bb (call_bb
); !gsi_end_p (gsi
);)
1424 if (gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
)
1426 last_stmt
= gsi_stmt (gsi
);
1431 e
= split_block (split_point
->entry_bb
, last_stmt
);
1434 /* Produce the call statement. */
1435 gimple_stmt_iterator gsi
= gsi_last_bb (call_bb
);
1436 FOR_EACH_VEC_ELT (args_to_pass
, i
, arg
)
1437 if (!is_gimple_val (arg
))
1439 arg
= force_gimple_operand_gsi (&gsi
, arg
, true, NULL_TREE
,
1440 false, GSI_CONTINUE_LINKING
);
1441 args_to_pass
[i
] = arg
;
1443 call
= gimple_build_call_vec (node
->decl
, args_to_pass
);
1444 gimple_call_set_with_bounds (call
, with_bounds
);
1445 gimple_set_block (call
, DECL_INITIAL (current_function_decl
));
1446 args_to_pass
.release ();
  /* For optimized away parameters, add on the caller side
     at the call
     DEBUG D#X => parm_Y(D)
     stmts and associate D#X with parm in the decl_debug_args_lookup
     vector to say for debug info that if parameter parm had been passed,
     it would have value parm_Y(D).  */
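  /* For instance (hypothetical, identifiers made up): if parameter "n" is
     not passed to func.part, the call site gets

	 # DEBUG D#1 => n_2(D)
	 func.part (...);

     so the debugger can still display "n" at the call as the value
     n_2(D) would have had.  */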
1455 for (parm
= DECL_ARGUMENTS (current_function_decl
), num
= 0;
1456 parm
; parm
= DECL_CHAIN (parm
), num
++)
1457 if (bitmap_bit_p (args_to_skip
, num
)
1458 && is_gimple_reg (parm
))
1463 /* This needs to be done even without MAY_HAVE_DEBUG_STMTS,
1464 otherwise if it didn't exist before, we'd end up with
1465 different SSA_NAME_VERSIONs between -g and -g0. */
1466 arg
= get_or_create_ssa_default_def (cfun
, parm
);
1467 if (!MAY_HAVE_DEBUG_STMTS
)
1470 if (debug_args
== NULL
)
1471 debug_args
= decl_debug_args_insert (node
->decl
);
1472 ddecl
= make_node (DEBUG_EXPR_DECL
);
1473 DECL_ARTIFICIAL (ddecl
) = 1;
1474 TREE_TYPE (ddecl
) = TREE_TYPE (parm
);
1475 DECL_MODE (ddecl
) = DECL_MODE (parm
);
1476 vec_safe_push (*debug_args
, DECL_ORIGIN (parm
));
1477 vec_safe_push (*debug_args
, ddecl
);
1478 def_temp
= gimple_build_debug_bind (ddecl
, unshare_expr (arg
),
1480 gsi_insert_after (&gsi
, def_temp
, GSI_NEW_STMT
);
  /* And on the callee side, add
     DEBUG D#Y s=> parm
     DEBUG var => D#Y
     stmts to the first bb where var is a VAR_DECL created for the
     optimized away parameter in DECL_INITIAL block.  This hints
     in the debug info that var (whose DECL_ORIGIN is the parm PARM_DECL)
     is optimized away, but could be looked up at the call site
     as the value of D#X there.  */
1490 if (debug_args
!= NULL
)
1494 gimple_stmt_iterator cgsi
;
1497 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
1498 var
= BLOCK_VARS (DECL_INITIAL (node
->decl
));
1499 i
= vec_safe_length (*debug_args
);
1500 cgsi
= gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
1504 while (var
!= NULL_TREE
1505 && DECL_ABSTRACT_ORIGIN (var
) != (**debug_args
)[i
])
1506 var
= TREE_CHAIN (var
);
1507 if (var
== NULL_TREE
)
1509 vexpr
= make_node (DEBUG_EXPR_DECL
);
1510 parm
= (**debug_args
)[i
];
1511 DECL_ARTIFICIAL (vexpr
) = 1;
1512 TREE_TYPE (vexpr
) = TREE_TYPE (parm
);
1513 DECL_MODE (vexpr
) = DECL_MODE (parm
);
1514 def_temp
= gimple_build_debug_source_bind (vexpr
, parm
,
1516 gsi_insert_before (&cgsi
, def_temp
, GSI_SAME_STMT
);
1517 def_temp
= gimple_build_debug_bind (var
, vexpr
, NULL
);
1518 gsi_insert_before (&cgsi
, def_temp
, GSI_SAME_STMT
);
1524 /* We avoid address being taken on any variable used by split part,
1525 so return slot optimization is always possible. Moreover this is
1526 required to make DECL_BY_REFERENCE work. */
1527 if (aggregate_value_p (DECL_RESULT (current_function_decl
),
1528 TREE_TYPE (current_function_decl
))
1529 && (!is_gimple_reg_type (TREE_TYPE (DECL_RESULT (current_function_decl
)))
1530 || DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
))))
1531 gimple_call_set_return_slot_opt (call
, true);
1533 if (add_tsan_func_exit
)
1534 tsan_func_exit_call
= gimple_build_call_internal (IFN_TSAN_FUNC_EXIT
, 0);
  /* Update the return value.  This is a bit tricky.  When we do not return,
     do nothing.  When we return we might need to update return_bb
     or produce a new return statement.  */
1539 if (!split_part_return_p
)
1541 gsi_insert_after (&gsi
, call
, GSI_NEW_STMT
);
1542 if (tsan_func_exit_call
)
1543 gsi_insert_after (&gsi
, tsan_func_exit_call
, GSI_NEW_STMT
);
1547 e
= make_edge (call_bb
, return_bb
,
1548 return_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
1549 ? 0 : EDGE_FALLTHRU
);
1550 e
->count
= call_bb
->count
;
1551 e
->probability
= REG_BR_PROB_BASE
;
1553 /* If there is return basic block, see what value we need to store
1554 return value into and put call just before it. */
1555 if (return_bb
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
1557 real_retval
= retval
= find_retval (return_bb
);
1558 retbnd
= find_retbnd (return_bb
);
1560 if (real_retval
&& split_point
->split_part_set_retval
)
1564 /* See if we need new SSA_NAME for the result.
1565 When DECL_BY_REFERENCE is true, retval is actually pointer to
1566 return value and it is constant in whole function. */
1567 if (TREE_CODE (retval
) == SSA_NAME
1568 && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
)))
1570 retval
= copy_ssa_name (retval
, call
);
1572 /* See if there is PHI defining return value. */
1573 for (psi
= gsi_start_phis (return_bb
);
1574 !gsi_end_p (psi
); gsi_next (&psi
))
1575 if (!virtual_operand_p (gimple_phi_result (psi
.phi ())))
1578 /* When there is PHI, just update its value. */
1579 if (TREE_CODE (retval
) == SSA_NAME
1580 && !gsi_end_p (psi
))
1581 add_phi_arg (psi
.phi (), retval
, e
, UNKNOWN_LOCATION
);
1582 /* Otherwise update the return BB itself.
1583 find_return_bb allows at most one assignment to return value,
1584 so update first statement. */
1587 gimple_stmt_iterator bsi
;
1588 for (bsi
= gsi_start_bb (return_bb
); !gsi_end_p (bsi
);
1590 if (greturn
*return_stmt
1591 = dyn_cast
<greturn
*> (gsi_stmt (bsi
)))
1593 gimple_return_set_retval (return_stmt
, retval
);
1596 else if (gimple_code (gsi_stmt (bsi
)) == GIMPLE_ASSIGN
1597 && !gimple_clobber_p (gsi_stmt (bsi
)))
1599 gimple_assign_set_rhs1 (gsi_stmt (bsi
), retval
);
1602 update_stmt (gsi_stmt (bsi
));
1605 /* Replace retbnd with new one. */
1608 gimple_stmt_iterator bsi
;
1609 for (bsi
= gsi_last_bb (return_bb
); !gsi_end_p (bsi
);
1611 if (gimple_code (gsi_stmt (bsi
)) == GIMPLE_RETURN
)
1613 retbnd
= copy_ssa_name (retbnd
, call
);
1614 gimple_return_set_retbnd (gsi_stmt (bsi
), retbnd
);
1615 update_stmt (gsi_stmt (bsi
));
1620 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
)))
1622 gimple_call_set_lhs (call
, build_simple_mem_ref (retval
));
1623 gsi_insert_after (&gsi
, call
, GSI_NEW_STMT
);
1628 restype
= TREE_TYPE (DECL_RESULT (current_function_decl
));
1629 gsi_insert_after (&gsi
, call
, GSI_NEW_STMT
);
1630 if (!useless_type_conversion_p (TREE_TYPE (retval
), restype
))
1633 tree tem
= create_tmp_reg (restype
);
1634 tem
= make_ssa_name (tem
, call
);
1635 cpy
= gimple_build_assign (retval
, NOP_EXPR
, tem
);
1636 gsi_insert_after (&gsi
, cpy
, GSI_NEW_STMT
);
1639 /* Build bndret call to obtain returned bounds. */
1641 chkp_insert_retbnd_call (retbnd
, retval
, &gsi
);
1642 gimple_call_set_lhs (call
, retval
);
1647 gsi_insert_after (&gsi
, call
, GSI_NEW_STMT
);
1648 if (tsan_func_exit_call
)
1649 gsi_insert_after (&gsi
, tsan_func_exit_call
, GSI_NEW_STMT
);
1651 /* We don't use return block (there is either no return in function or
1652 multiple of them). So create new basic block with return statement.
1657 if (split_point
->split_part_set_retval
1658 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))))
1660 retval
= DECL_RESULT (current_function_decl
);
1662 if (chkp_function_instrumented_p (current_function_decl
)
1663 && BOUNDED_P (retval
))
1664 retbnd
= create_tmp_reg (pointer_bounds_type_node
);
	  /* We use a temporary register to hold the value when
	     aggregate_value_p is false.  Similarly for DECL_BY_REFERENCE we
	     must avoid an extra copy.  */
1669 if (!aggregate_value_p (retval
, TREE_TYPE (current_function_decl
))
1670 && !DECL_BY_REFERENCE (retval
))
1671 retval
= create_tmp_reg (TREE_TYPE (retval
));
1672 if (is_gimple_reg (retval
))
1674 /* When returning by reference, there is only one SSA name
1675 assigned to RESULT_DECL (that is pointer to return value).
1676 Look it up or create new one if it is missing. */
1677 if (DECL_BY_REFERENCE (retval
))
1678 retval
= get_or_create_ssa_default_def (cfun
, retval
);
1679 /* Otherwise produce new SSA name for return value. */
1681 retval
= make_ssa_name (retval
, call
);
1683 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
)))
1684 gimple_call_set_lhs (call
, build_simple_mem_ref (retval
));
1686 gimple_call_set_lhs (call
, retval
);
1688 gsi_insert_after (&gsi
, call
, GSI_NEW_STMT
);
1689 /* Build bndret call to obtain returned bounds. */
1691 chkp_insert_retbnd_call (retbnd
, retval
, &gsi
);
1692 if (tsan_func_exit_call
)
1693 gsi_insert_after (&gsi
, tsan_func_exit_call
, GSI_NEW_STMT
);
1694 ret
= gimple_build_return (retval
);
1695 gsi_insert_after (&gsi
, ret
, GSI_NEW_STMT
);
1698 free_dominance_info (CDI_DOMINATORS
);
1699 free_dominance_info (CDI_POST_DOMINATORS
);
1700 compute_inline_parameters (node
, true);
1703 /* Execute function splitting pass. */
1706 execute_split_functions (void)
1708 gimple_stmt_iterator bsi
;
1710 int overall_time
= 0, overall_size
= 0;
1712 struct cgraph_node
*node
= cgraph_node::get (current_function_decl
);
1714 if (flags_from_decl_or_type (current_function_decl
)
1715 & (ECF_NORETURN
|ECF_MALLOC
))
1718 fprintf (dump_file
, "Not splitting: noreturn/malloc function.\n");
1721 if (MAIN_NAME_P (DECL_NAME (current_function_decl
)))
1724 fprintf (dump_file
, "Not splitting: main function.\n");
1727 /* This can be relaxed; function might become inlinable after splitting
1728 away the uninlinable part. */
1729 if (inline_edge_summary_vec
.exists ()
1730 && !inline_summaries
->get (node
)->inlinable
)
1733 fprintf (dump_file
, "Not splitting: not inlinable.\n");
1736 if (DECL_DISREGARD_INLINE_LIMITS (node
->decl
))
1739 fprintf (dump_file
, "Not splitting: disregarding inline limits.\n");
  /* This can be relaxed; most of the versioning tests actually prevent
     a duplication.  */
1744 if (!tree_versionable_function_p (current_function_decl
))
1747 fprintf (dump_file
, "Not splitting: not versionable.\n");
1750 /* FIXME: we could support this. */
1751 if (DECL_STRUCT_FUNCTION (current_function_decl
)->static_chain_decl
)
1754 fprintf (dump_file
, "Not splitting: nested function.\n");
  /* See if it makes sense to try to split.
     It makes sense to split if we inline, that is if we have direct calls to
     handle or direct calls are possibly going to appear as a result of
     indirect inlining or LTO.  Also handle -fprofile-generate as LTO to allow
     non-LTO training for an LTO -fprofile-use build.

     Note that we are not completely conservative about disqualifying
     functions called once.  It is possible that the caller is called more
     than once and then inlining would still benefit.  */
  if ((!node->callers
       /* Local functions called once will be completely inlined most of the
	  time.  */
       || (!node->callers->next_caller && node->local.local))
->callers
->next_caller
&& node
->local
.local
))
1770 && !node
->address_taken
1771 && !node
->has_aliases_p ()
1772 && (!flag_lto
|| !node
->externally_visible
))
1775 fprintf (dump_file
, "Not splitting: not called directly "
1776 "or called once.\n");
1780 /* FIXME: We can actually split if splitting reduces call overhead. */
1781 if (!flag_inline_small_functions
1782 && !DECL_DECLARED_INLINE_P (current_function_decl
))
1785 fprintf (dump_file
, "Not splitting: not autoinlining and function"
1786 " is not inline.\n");
  /* We enforce splitting after loop headers when profile info is not
     available.  */
1792 if (profile_status_for_fn (cfun
) != PROFILE_READ
)
1793 mark_dfs_back_edges ();
1795 /* Initialize bitmap to track forbidden calls. */
1796 forbidden_dominators
= BITMAP_ALLOC (NULL
);
1797 calculate_dominance_info (CDI_DOMINATORS
);
1799 /* Compute local info about basic blocks and determine function size/time. */
1800 bb_info_vec
.safe_grow_cleared (last_basic_block_for_fn (cfun
) + 1);
1801 memset (&best_split_point
, 0, sizeof (best_split_point
));
1802 basic_block return_bb
= find_return_bb ();
1803 int tsan_exit_found
= -1;
1804 FOR_EACH_BB_FN (bb
, cfun
)
1808 int freq
= compute_call_stmt_bb_frequency (current_function_decl
, bb
);
1810 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1811 fprintf (dump_file
, "Basic block %i\n", bb
->index
);
1813 for (bsi
= gsi_start_bb (bb
); !gsi_end_p (bsi
); gsi_next (&bsi
))
1815 int this_time
, this_size
;
1816 gimple stmt
= gsi_stmt (bsi
);
1818 this_size
= estimate_num_insns (stmt
, &eni_size_weights
);
1819 this_time
= estimate_num_insns (stmt
, &eni_time_weights
) * freq
;
1822 check_forbidden_calls (stmt
);
1824 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1826 fprintf (dump_file
, " freq:%6i size:%3i time:%3i ",
1827 freq
, this_size
, this_time
);
1828 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1831 if ((flag_sanitize
& SANITIZE_THREAD
)
1832 && is_gimple_call (stmt
)
1833 && gimple_call_internal_p (stmt
)
1834 && gimple_call_internal_fn (stmt
) == IFN_TSAN_FUNC_EXIT
)
1836 /* We handle TSAN_FUNC_EXIT for splitting either in the
1837 return_bb, or in its immediate predecessors. */
1838 if ((bb
!= return_bb
&& !find_edge (bb
, return_bb
))
1839 || (tsan_exit_found
!= -1
1840 && tsan_exit_found
!= (bb
!= return_bb
)))
1843 fprintf (dump_file
, "Not splitting: TSAN_FUNC_EXIT"
1844 " in unexpected basic block.\n");
1845 BITMAP_FREE (forbidden_dominators
);
1846 bb_info_vec
.release ();
1849 tsan_exit_found
= bb
!= return_bb
;
1852 overall_time
+= time
;
1853 overall_size
+= size
;
1854 bb_info_vec
[bb
->index
].time
= time
;
1855 bb_info_vec
[bb
->index
].size
= size
;
1857 find_split_points (return_bb
, overall_time
, overall_size
);
1858 if (best_split_point
.split_bbs
)
1860 split_function (return_bb
, &best_split_point
, tsan_exit_found
== 1);
1861 BITMAP_FREE (best_split_point
.ssa_names_to_pass
);
1862 BITMAP_FREE (best_split_point
.split_bbs
);
1863 todo
= TODO_update_ssa
| TODO_cleanup_cfg
;
1865 BITMAP_FREE (forbidden_dominators
);
1866 bb_info_vec
.release ();
  return todo;
}

namespace {

const pass_data pass_data_split_functions =
{
  GIMPLE_PASS, /* type */
  "fnsplit", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FNSPLIT, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_functions : public gimple_opt_pass
{
public:
  pass_split_functions (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_functions, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return execute_split_functions ();
    }

}; // class pass_split_functions

bool
pass_split_functions::gate (function *)
{
  /* When doing profile feedback, we want to execute the pass after profiling
     is read.  So disable one in early optimization.  */
  return (flag_partial_inlining
	  && !profile_arc_flag && !flag_branch_probabilities);
}

} // anon namespace

gimple_opt_pass *
make_pass_split_functions (gcc::context *ctxt)
{
  return new pass_split_functions (ctxt);
}

/* Execute the feedback driven function splitting pass.  */

static unsigned int
execute_feedback_split_functions (void)
{
  unsigned int retval = execute_split_functions ();
  if (retval)
    retval |= TODO_rebuild_cgraph_edges;
  return retval;
}

namespace {

const pass_data pass_data_feedback_split_functions =
{
  GIMPLE_PASS, /* type */
  "feedback_fnsplit", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FNSPLIT, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_feedback_split_functions : public gimple_opt_pass
{
public:
  pass_feedback_split_functions (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_feedback_split_functions, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return execute_feedback_split_functions ();
    }

}; // class pass_feedback_split_functions

bool
pass_feedback_split_functions::gate (function *)
{
  /* We don't need to split when profiling at all, we are producing
     lousy code anyway.  */
  return (flag_partial_inlining
	  && flag_branch_probabilities);
}

} // anon namespace

gimple_opt_pass *
make_pass_feedback_split_functions (gcc::context *ctxt)
{
  return new pass_feedback_split_functions (ctxt);
}