/* Function splitting pass
   Copyright (C) 2010-2015 Free Software Foundation, Inc.
   Contributed by Jan Hubicka <jh@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* The purpose of this pass is to split function bodies to improve
   inlining.  I.e. for a function of the form:
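
     func (...)
       {
         if (cheap_test)
           something_small
         else
           something_big
       }

   the pass produces:

     func.part (...)
       {
         something_big
       }

     func (...)
       {
         if (cheap_test)
           something_small
         else
           func.part (...);
       }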

   When func becomes inlinable and when cheap_test is often true, inlining
   func, but not func.part, leads to a performance improvement similar to
   inlining the original func, while the code size growth is smaller.

   The pass is organized in three stages:
   1) Collect local info about basic blocks into the BB_INFO structure and
      compute the function body's estimated size and time.
   2) Via a DFS walk, find all possible basic blocks where we can split
      and choose the best one.
   3) If a split point is found, split at the specified BB by creating a
      clone and updating the function to call it.

   The decisions about what functions to split are made in
   execute_split_functions and consider_split.

   There are several possible future improvements for this pass, including:

   1) Splitting to break up large functions.
   2) Splitting to reduce stack frame usage.
   3) Allow the split part of the function to use values computed in the
      header part.  The values need to be passed to the split function,
      perhaps via the same interface as for nested functions or as
      arguments.
   4) Support for simple rematerialization; i.e. when the split part uses
      a value computed in the header from a function parameter in a very
      cheap way, we can just recompute it (see the sketch after this
      comment).
   5) Support splitting of nested functions.
   6) Support non-SSA arguments.
   7) There is nothing preventing us from producing multiple parts of a
      single function when needed, or from splitting the parts themselves.  */
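
/* An illustrative sketch of improvement 4 above (not existing code): if
   the header computes

     tmp_1 = param_2 + 1;

   and tmp_1 is the only value live into the split part, the split part
   could recompute tmp_1 from param_2 itself instead of receiving it as an
   extra argument, since the recomputation is cheaper than the parameter
   move.  */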

#include "coretypes.h"
#include "fold-const.h"
#include "internal-fn.h"
#include "insn-config.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
#include "tree-chkp.h"

/* Per basic block info.  */

struct split_bb_info
{
  unsigned int size;
  unsigned int time;
};

static vec<split_bb_info> bb_info_vec;

/* Description of split point.  */

struct split_point
{
  /* Size of the partitions.  */
  unsigned int header_time, header_size, split_time, split_size;

  /* SSA names that need to be passed into the split function.  */
  bitmap ssa_names_to_pass;

  /* Basic block where we split (that will become the entry point of the
     new function).  */
  basic_block entry_bb;

  /* Basic blocks we are splitting away.  */
  bitmap split_bbs;

  /* True when the return value is computed in the split part and thus it
     needs to be returned.  */
  bool split_part_set_retval;
};

/* Best split point found.  */

struct split_point best_split_point;

/* Set of basic blocks that are not allowed to dominate a split point.  */

static bitmap forbidden_dominators;

static tree find_retval (basic_block return_bb);
static tree find_retbnd (basic_block return_bb);

/* Callback for walk_stmt_load_store_addr_ops.  If T is a non-SSA automatic
   variable, check whether it is present in the bitmap passed via DATA.  */

static bool
test_nonssa_use (gimple, tree t, tree, void *data)
{
  t = get_base_address (t);

  if (!t || is_gimple_reg (t))
    return false;

  if (TREE_CODE (t) == PARM_DECL
      || (TREE_CODE (t) == VAR_DECL
	  && auto_var_in_fn_p (t, current_function_decl))
      || TREE_CODE (t) == RESULT_DECL
	 /* Normal labels are part of the CFG and will be handled gracefully.
	    Forced labels however can be used directly by statements and
	    need to stay in one partition along with their uses.  */
      || (TREE_CODE (t) == LABEL_DECL
	  && FORCED_LABEL (t)))
    return bitmap_bit_p ((bitmap)data, DECL_UID (t));

  /* For DECL_BY_REFERENCE, the return value is actually a pointer.  We want
     to pretend that the value pointed to is the actual result decl.  */
  if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
      && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
      && SSA_NAME_VAR (TREE_OPERAND (t, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
      && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    return
      bitmap_bit_p ((bitmap)data,
		    DECL_UID (DECL_RESULT (current_function_decl)));

  return false;
}

/* Dump split point CURRENT.  */

static void
dump_split_point (FILE * file, struct split_point *current)
{
  fprintf (file,
	   "Split point at BB %i\n"
	   "  header time: %i header size: %i\n"
	   "  split time: %i split size: %i\n  bbs: ",
	   current->entry_bb->index, current->header_time,
	   current->header_size, current->split_time, current->split_size);
  dump_bitmap (file, current->split_bbs);
  fprintf (file, "  SSA names to pass: ");
  dump_bitmap (file, current->ssa_names_to_pass);
}

/* Look for all BBs in the header that might lead to the split part and
   verify that they are not defining any non-SSA var used by the split part.
   Parameters are the same as for consider_split.  */

static bool
verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
		     basic_block return_bb)
{
  bitmap seen = BITMAP_ALLOC (NULL);
  vec<basic_block> worklist = vNULL;
  edge e;
  edge_iterator ei;
  bool ok = true;
  basic_block bb;

  FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
    if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	&& !bitmap_bit_p (current->split_bbs, e->src->index))
      {
	worklist.safe_push (e->src);
	bitmap_set_bit (seen, e->src->index);
      }

  while (!worklist.is_empty ())
    {
      bb = worklist.pop ();
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	    && bitmap_set_bit (seen, e->src->index))
	  {
	    gcc_checking_assert (!bitmap_bit_p (current->split_bbs,
						e->src->index));
	    worklist.safe_push (e->src);
	  }
      for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
	   gsi_next (&bsi))
	{
	  gimple stmt = gsi_stmt (bsi);
	  if (is_gimple_debug (stmt))
	    continue;
	  if (walk_stmt_load_store_addr_ops
	      (stmt, non_ssa_vars, test_nonssa_use, test_nonssa_use,
	       test_nonssa_use))
	    {
	      ok = false;
	      goto done;
	    }
	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	    if (test_nonssa_use (stmt, gimple_label_label (label_stmt),
				 NULL_TREE, non_ssa_vars))
	      {
		ok = false;
		goto done;
	      }
	}
      for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
	   gsi_next (&bsi))
	{
	  if (walk_stmt_load_store_addr_ops
	      (gsi_stmt (bsi), non_ssa_vars, test_nonssa_use, test_nonssa_use,
	       test_nonssa_use))
	    {
	      ok = false;
	      goto done;
	    }
	}
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->dest != return_bb)
	    continue;
	  for (gphi_iterator bsi = gsi_start_phis (return_bb);
	       !gsi_end_p (bsi);
	       gsi_next (&bsi))
	    {
	      gphi *stmt = bsi.phi ();
	      tree op = gimple_phi_arg_def (stmt, e->dest_idx);

	      if (virtual_operand_p (gimple_phi_result (stmt)))
		continue;
	      if (TREE_CODE (op) != SSA_NAME
		  && test_nonssa_use (stmt, op, op, non_ssa_vars))
		{
		  ok = false;
		  goto done;
		}
	    }
	}
    }

  /* Verify that the rest of the function does not define any label
     used by the split part.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (!bitmap_bit_p (current->split_bbs, bb->index)
	&& !bitmap_bit_p (seen, bb->index))
      {
	gimple_stmt_iterator bsi;
	for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	  if (glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (bsi)))
	    if (test_nonssa_use (label_stmt,
				 gimple_label_label (label_stmt),
				 NULL_TREE, non_ssa_vars))
	      {
		ok = false;
		goto done;
	      }
      }

done:
  BITMAP_FREE (seen);
  worklist.release ();
  return ok;
}

/* If STMT is a call, check the callee against a list of forbidden
   predicate functions.  If a match is found, look for uses of the
   call result in condition statements that compare against zero.
   For each such use, find the block targeted by the condition
   statement for the nonzero result, and set the bit for this block
   in the forbidden dominators bitmap.  The purpose of this is to avoid
   selecting a split point where we are likely to lose the chance
   to optimize away an unused function call.  */
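
/* An illustrative sketch (not code from this file): given

     t_1 = __builtin_constant_p (x_2);
     if (t_1 != 0)
       goto <bb A>;	<- A is recorded in forbidden_dominators
     else
       goto <bb B>;

   the code in A is typically useful only when, after inlining,
   __builtin_constant_p folds to a nonzero constant and the guarded code
   simplifies away; outlining a region dominated by A into the split part
   would lose that opportunity.  */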

static void
check_forbidden_calls (gimple stmt)
{
  imm_use_iterator use_iter;
  use_operand_p use_p;
  tree lhs;

  /* At the moment, __builtin_constant_p is the only forbidden
     predicate function call (see PR49642).  */
  if (!gimple_call_builtin_p (stmt, BUILT_IN_CONSTANT_P))
    return;

  lhs = gimple_call_lhs (stmt);

  if (!lhs || TREE_CODE (lhs) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_FAST (use_p, use_iter, lhs)
    {
      basic_block use_bb, forbidden_bb;
      enum tree_code code;
      edge true_edge, false_edge;
      gcond *use_stmt;
      tree op1;

      use_stmt = dyn_cast <gcond *> (USE_STMT (use_p));
      if (!use_stmt)
	continue;

      /* Assuming canonical form for GIMPLE_COND here, with constant
	 in second position.  */
      op1 = gimple_cond_rhs (use_stmt);
      code = gimple_cond_code (use_stmt);
      use_bb = gimple_bb (use_stmt);

      extract_true_false_edges_from_block (use_bb, &true_edge, &false_edge);

      /* We're only interested in comparisons that distinguish
	 unambiguously from zero.  */
      if (!integer_zerop (op1) || code == LE_EXPR || code == GE_EXPR)
	continue;

      if (code == EQ_EXPR)
	forbidden_bb = false_edge->dest;
      else
	forbidden_bb = true_edge->dest;

      bitmap_set_bit (forbidden_dominators, forbidden_bb->index);
    }
}

/* If BB is dominated by any block in the forbidden dominators set,
   return TRUE; else FALSE.  */

static bool
dominated_by_forbidden (basic_block bb)
{
  unsigned dom_bb;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (forbidden_dominators, 1, dom_bb, bi)
    {
      if (dominated_by_p (CDI_DOMINATORS, bb,
			  BASIC_BLOCK_FOR_FN (cfun, dom_bb)))
	return true;
    }

  return false;
}

/* For a given split point CURRENT and return block RETURN_BB, return 1
   if the ssa name VAL is set by the split part and 0 otherwise.  */

static bool
split_part_set_ssa_name_p (tree val, struct split_point *current,
			   basic_block return_bb)
{
  if (TREE_CODE (val) != SSA_NAME)
    return false;

  return (!SSA_NAME_IS_DEFAULT_DEF (val)
	  && (bitmap_bit_p (current->split_bbs,
			    gimple_bb (SSA_NAME_DEF_STMT (val))->index)
	      || gimple_bb (SSA_NAME_DEF_STMT (val)) == return_bb));
}

/* We found a split_point CURRENT.  NON_SSA_VARS is a bitmap of all non-SSA
   variables used and RETURN_BB is the return basic block.
   See if we can split the function here.  */

static void
consider_split (struct split_point *current, bitmap non_ssa_vars,
		basic_block return_bb)
{
  tree parm;
  unsigned int num_args = 0;
  unsigned int call_overhead;
  edge e;
  edge_iterator ei;
  gphi_iterator bsi;
  unsigned int i;
  int incoming_freq = 0;
  tree retval;
  tree retbnd;
  bool back_edge = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_split_point (dump_file, current);

  FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
    {
      if (e->flags & EDGE_DFS_BACK)
	back_edge = true;
      if (!bitmap_bit_p (current->split_bbs, e->src->index))
	incoming_freq += EDGE_FREQUENCY (e);
    }

  /* Do not split when we would end up calling the function anyway.  */
  if (incoming_freq
      >= (ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
	  * PARAM_VALUE (PARAM_PARTIAL_INLINING_ENTRY_PROBABILITY) / 100))
    {
      /* When the profile is guessed, we cannot expect it to give us a
	 realistic estimate of the likelihood of the function taking the
	 complex path.  As a special case, when the tail of the function is
	 a loop, enable splitting, since inlining code that skips the loop
	 is likely a noticeable win.  */
      if (back_edge
	  && profile_status_for_fn (cfun) != PROFILE_READ
	  && incoming_freq < ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "  Split before loop, accepting despite low frequencies %i %i.\n",
		     incoming_freq,
		     ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency);
	}
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "  Refused: incoming frequency is too large.\n");
	  return;
	}
    }

  if (!current->header_size)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "  Refused: header empty\n");
      return;
    }

  /* Verify that PHI args on entry are either virtual or all their operands
     incoming from the header are the same.  */
  for (bsi = gsi_start_phis (current->entry_bb); !gsi_end_p (bsi);
       gsi_next (&bsi))
    {
      gphi *stmt = bsi.phi ();
      tree val = NULL;

      if (virtual_operand_p (gimple_phi_result (stmt)))
	continue;
      for (i = 0; i < gimple_phi_num_args (stmt); i++)
	{
	  edge e = gimple_phi_arg_edge (stmt, i);
	  if (!bitmap_bit_p (current->split_bbs, e->src->index))
	    {
	      tree edge_val = gimple_phi_arg_def (stmt, i);
	      if (val && edge_val != val)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file,
			     "  Refused: entry BB has PHI with multiple variants\n");
		  return;
		}
	      val = edge_val;
	    }
	}
    }

  /* See what arguments we will pass to the split function and compute
     the call overhead.  */
  call_overhead = eni_size_weights.call_cost;
  for (parm = DECL_ARGUMENTS (current_function_decl); parm;
       parm = DECL_CHAIN (parm))
    {
      if (!is_gimple_reg (parm))
	{
	  if (bitmap_bit_p (non_ssa_vars, DECL_UID (parm)))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "  Refused: need to pass non-ssa param values\n");
	      return;
	    }
	}
      else
	{
	  tree ddef = ssa_default_def (cfun, parm);
	  if (ddef
	      && bitmap_bit_p (current->ssa_names_to_pass,
			       SSA_NAME_VERSION (ddef)))
	    {
	      if (!VOID_TYPE_P (TREE_TYPE (parm)))
		call_overhead += estimate_move_cost (TREE_TYPE (parm), false);
	      num_args++;
	    }
	}
    }
  if (!VOID_TYPE_P (TREE_TYPE (current_function_decl)))
    call_overhead += estimate_move_cost (TREE_TYPE (current_function_decl),
					 false);

  if (current->split_size <= call_overhead)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: split size is smaller than call overhead\n");
      return;
    }
  if (current->header_size + call_overhead
      >= (unsigned int)(DECL_DECLARED_INLINE_P (current_function_decl)
			? MAX_INLINE_INSNS_SINGLE
			: MAX_INLINE_INSNS_AUTO))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: header size is too large for inline candidate\n");
      return;
    }

  /* Splitting functions brings the target out of its comdat group; this
     will lead to code duplication if the function is reused by another
     unit.  Limit this duplication.  This is consistent with the limit in
     tree-sra.c.
     FIXME: with LTO we ought to be able to do better!  */
  if (DECL_ONE_ONLY (current_function_decl)
      && current->split_size >= (unsigned int) MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: function is COMDAT and tail is too large\n");
      return;
    }
  /* For comdat functions also reject very small tails; those will likely
     get inlined back and we do not want to risk the duplication overhead.
     FIXME: with LTO we ought to be able to do better!  */
  if (DECL_ONE_ONLY (current_function_decl)
      && current->split_size
	 <= (unsigned int) PARAM_VALUE (PARAM_EARLY_INLINING_INSNS) / 2)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: function is COMDAT and tail is too small\n");
      return;
    }

  /* FIXME: we can currently pass only SSA function parameters to the split
     function as arguments.  Once the parm_adjustment infrastructure is
     supported by cloning, we can pass more than that.  */
  if (num_args != bitmap_count_bits (current->ssa_names_to_pass))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: need to pass non-param values\n");
      return;
    }

  /* When there are non-ssa vars used in the split region, see if they
     are used in the header region.  If so, reject the split.
     FIXME: we can use nested function support to access both.  */
  if (!bitmap_empty_p (non_ssa_vars)
      && !verify_non_ssa_vars (current, non_ssa_vars, return_bb))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: split part has non-ssa uses\n");
      return;
    }

  /* If the split point is dominated by a forbidden block, reject
     the split.  */
  if (!bitmap_empty_p (forbidden_dominators)
      && dominated_by_forbidden (current->entry_bb))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "  Refused: split point dominated by forbidden block\n");
      return;
    }

  /* See if the retval used by the return bb is computed by the header or
     by the split part.  When it is computed by the split part, we need to
     produce a return statement in the split part and add code to the
     header to pass the value around.

     This is a bit tricky to test:
       1) When there is no return_bb or no return value, we always pass the
	  value around.
       2) Invariants are always computed by the caller.
       3) For SSA we need to look whether the defining statement is in the
	  header or in the split part.
       4) For non-SSA we need to look where the var is computed.  */
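
  /* An illustrative sketch: if return_bb contains only "return r_2;" and
     r_2 is defined by a statement in one of the split blocks, case 3 makes
     split_part_set_retval true; the split part then returns r_2 and the
     header passes the call result through.  If r_2 is defined in the
     header, the flag is false and no value needs to flow back.  */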
  retval = find_retval (return_bb);
  if (!retval)
    current->split_part_set_retval = true;
  else if (is_gimple_min_invariant (retval))
    current->split_part_set_retval = false;
  /* A special case is a value returned by reference, which we record as if
     it was a non-ssa set to the result_decl.  */
  else if (TREE_CODE (retval) == SSA_NAME
	   && SSA_NAME_VAR (retval)
	   && TREE_CODE (SSA_NAME_VAR (retval)) == RESULT_DECL
	   && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    current->split_part_set_retval
      = bitmap_bit_p (non_ssa_vars, DECL_UID (SSA_NAME_VAR (retval)));
  else if (TREE_CODE (retval) == SSA_NAME)
    current->split_part_set_retval
      = split_part_set_ssa_name_p (retval, current, return_bb);
  else if (TREE_CODE (retval) == PARM_DECL)
    current->split_part_set_retval = false;
  else if (TREE_CODE (retval) == VAR_DECL
	   || TREE_CODE (retval) == RESULT_DECL)
    current->split_part_set_retval
      = bitmap_bit_p (non_ssa_vars, DECL_UID (retval));
  else
    current->split_part_set_retval = true;

  /* See if the retbnd used by the return bb is computed by the header or
     by the split part.  */
  retbnd = find_retbnd (return_bb);
  if (retbnd)
    {
      bool split_part_set_retbnd
	= split_part_set_ssa_name_p (retbnd, current, return_bb);

      /* If we have both a return value and bounds, then keep their
	 definitions in a single function.  We use SSA names to link the
	 returned bounds and value and therefore do not handle cases when
	 the result is passed by reference (which should not be our case
	 anyway, since bounds are returned for pointers only).  */
      if ((DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))
	   && current->split_part_set_retval)
	  || split_part_set_retbnd != current->split_part_set_retval)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "  Refused: split point splits return value and bounds\n");
	  return;
	}
    }

  /* split_function fixes up at most one non-virtual PHI node in return_bb,
     for the return value.  If there are other PHIs, give up.  */
  if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      gphi_iterator psi;

      for (psi = gsi_start_phis (return_bb); !gsi_end_p (psi);
	   gsi_next (&psi))
	if (!virtual_operand_p (gimple_phi_result (psi.phi ()))
	    && !(retval
		 && current->split_part_set_retval
		 && TREE_CODE (retval) == SSA_NAME
		 && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))
		 && SSA_NAME_DEF_STMT (retval) == psi.phi ()))
	  {
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file,
		       "  Refused: return bb has extra PHIs\n");
	    return;
	  }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "  Accepted!\n");

  /* At the moment, choose the split point with the lowest frequency and
     that leaves the smallest size of header.
     In the future we might re-consider this heuristic.  */
  if (!best_split_point.split_bbs
      || best_split_point.entry_bb->frequency > current->entry_bb->frequency
      || (best_split_point.entry_bb->frequency == current->entry_bb->frequency
	  && best_split_point.split_size < current->split_size))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "  New best split point!\n");
      if (best_split_point.ssa_names_to_pass)
	{
	  BITMAP_FREE (best_split_point.ssa_names_to_pass);
	  BITMAP_FREE (best_split_point.split_bbs);
	}
      best_split_point = *current;
      best_split_point.ssa_names_to_pass = BITMAP_ALLOC (NULL);
      bitmap_copy (best_split_point.ssa_names_to_pass,
		   current->ssa_names_to_pass);
      best_split_point.split_bbs = BITMAP_ALLOC (NULL);
      bitmap_copy (best_split_point.split_bbs, current->split_bbs);
    }
}

/* Return the basic block containing the RETURN statement.  We allow basic
   blocks of the form:

     <retval> = tmp_var;
     return <retval>;

   but return_bb cannot be more complex than this (except that for
   -fsanitize=thread we allow a TSAN_FUNC_EXIT () internal call in there).
   If nothing is found, return the exit block.

   When there are multiple RETURN statements, choose the one with a return
   value, since that one is more likely shared by multiple code paths.

   The return BB is special, because for function splitting it is the only
   basic block that is duplicated between the header and the split part of
   the function.

   TODO: We might support multiple return blocks.  */

static basic_block
find_return_bb (void)
{
  edge e;
  basic_block return_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  gimple_stmt_iterator bsi;
  bool found_return = false;
  tree retval = NULL_TREE;

  if (!single_pred_p (EXIT_BLOCK_PTR_FOR_FN (cfun)))
    return return_bb;

  e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun));
  for (bsi = gsi_last_bb (e->src); !gsi_end_p (bsi); gsi_prev (&bsi))
    {
      gimple stmt = gsi_stmt (bsi);
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || is_gimple_debug (stmt)
	  || gimple_clobber_p (stmt))
	;
      else if (gimple_code (stmt) == GIMPLE_ASSIGN
	       && found_return
	       && gimple_assign_single_p (stmt)
	       && (auto_var_in_fn_p (gimple_assign_rhs1 (stmt),
				     current_function_decl)
		   || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
	       && retval == gimple_assign_lhs (stmt))
	;
      else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
	{
	  found_return = true;
	  retval = gimple_return_retval (return_stmt);
	}
      /* For -fsanitize=thread, allow also TSAN_FUNC_EXIT () in the return
	 bb.  */
      else if ((flag_sanitize & SANITIZE_THREAD)
	       && is_gimple_call (stmt)
	       && gimple_call_internal_p (stmt)
	       && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
	;
      else
	break;
    }
  if (gsi_end_p (bsi) && found_return)
    return_bb = e->src;

  return return_bb;
}

/* Given return basic block RETURN_BB, see where the return value is really
   stored.  */

static tree
find_retval (basic_block return_bb)
{
  gimple_stmt_iterator bsi;
  for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
    if (greturn *return_stmt = dyn_cast <greturn *> (gsi_stmt (bsi)))
      return gimple_return_retval (return_stmt);
    else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
	     && !gimple_clobber_p (gsi_stmt (bsi)))
      return gimple_assign_rhs1 (gsi_stmt (bsi));
  return NULL;
}

/* Given return basic block RETURN_BB, see where the return bounds are
   really stored.  */

static tree
find_retbnd (basic_block return_bb)
{
  gimple_stmt_iterator bsi;
  for (bsi = gsi_last_bb (return_bb); !gsi_end_p (bsi); gsi_prev (&bsi))
    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
      return gimple_return_retbnd (gsi_stmt (bsi));
  return NULL;
}

/* Callback for walk_stmt_load_store_addr_ops.  If T is a non-SSA automatic
   variable, mark it as used in the bitmap passed via DATA.
   Return true when access to T prevents splitting the function.  */

static bool
mark_nonssa_use (gimple, tree t, tree, void *data)
{
  t = get_base_address (t);

  if (!t || is_gimple_reg (t))
    return false;

  /* At present we can't pass non-SSA arguments to the split function.
     FIXME: this can be relaxed by passing references to arguments.  */
  if (TREE_CODE (t) == PARM_DECL)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Cannot split: use of non-ssa function parameter.\n");
      return true;
    }

  if ((TREE_CODE (t) == VAR_DECL
       && auto_var_in_fn_p (t, current_function_decl))
      || TREE_CODE (t) == RESULT_DECL
      || (TREE_CODE (t) == LABEL_DECL
	  && FORCED_LABEL (t)))
    bitmap_set_bit ((bitmap)data, DECL_UID (t));

  /* For DECL_BY_REFERENCE, the return value is actually a pointer.  We want
     to pretend that the value pointed to is the actual result decl.  */
  if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
      && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
      && SSA_NAME_VAR (TREE_OPERAND (t, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
      && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    return
      bitmap_bit_p ((bitmap)data,
		    DECL_UID (DECL_RESULT (current_function_decl)));

  return false;
}

/* Compute the local properties of basic block BB that we collect when
   looking for split points.  We look for SSA defs and store them in
   SET_SSA_NAMES, for SSA uses and store them in USED_SSA_NAMES, and for
   any non-SSA automatic vars stored in NON_SSA_VARS.

   When BB has an edge to RETURN_BB, collect uses in RETURN_BB too.

   Return false when BB contains something that prevents it from being put
   into the split function.  */

static bool
visit_bb (basic_block bb, basic_block return_bb,
	  bitmap set_ssa_names, bitmap used_ssa_names,
	  bitmap non_ssa_vars)
{
  edge e;
  edge_iterator ei;
  bool can_split = true;

  for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
       gsi_next (&bsi))
    {
      gimple stmt = gsi_stmt (bsi);
      tree op;
      ssa_op_iter iter;
      tree decl;

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_clobber_p (stmt))
	continue;

      /* FIXME: We can split regions containing EH.  We cannot however
	 split RESX, EH_DISPATCH and EH_POINTER referring to the same region
	 into different partitions.  This would require tracking of
	 EH regions and checking in consider_split_point if they
	 are not used elsewhere.  */
      if (gimple_code (stmt) == GIMPLE_RESX)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Cannot split: resx.\n");
	  can_split = false;
	}
      if (gimple_code (stmt) == GIMPLE_EH_DISPATCH)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Cannot split: eh dispatch.\n");
	  can_split = false;
	}

      /* Check builtins that prevent splitting.  */
      if (gimple_code (stmt) == GIMPLE_CALL
	  && (decl = gimple_call_fndecl (stmt)) != NULL_TREE
	  && DECL_BUILT_IN (decl)
	  && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (decl))
	  {
	  /* FIXME: once we allow passing non-parm values to the split part,
	     we need to be sure to handle correctly builtin_stack_save and
	     builtin_stack_restore.  At the moment we are safe; there is no
	     way to store the builtin_stack_save result in a non-SSA
	     variable, since all calls to those are compiler generated.  */
	  case BUILT_IN_APPLY:
	  case BUILT_IN_APPLY_ARGS:
	  case BUILT_IN_VA_START:
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file,
		       "Cannot split: builtin_apply and va_start.\n");
	    can_split = false;
	    break;
	  case BUILT_IN_EH_POINTER:
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Cannot split: builtin_eh_pointer.\n");
	    can_split = false;
	    break;
	  default:
	    break;
	  }

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
	bitmap_set_bit (set_ssa_names, SSA_NAME_VERSION (op));
      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
      can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
						   mark_nonssa_use,
						   mark_nonssa_use,
						   mark_nonssa_use);
    }
  for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
       gsi_next (&bsi))
    {
      gphi *stmt = bsi.phi ();
      unsigned int i;

      if (virtual_operand_p (gimple_phi_result (stmt)))
	continue;
      bitmap_set_bit (set_ssa_names,
		      SSA_NAME_VERSION (gimple_phi_result (stmt)));
      for (i = 0; i < gimple_phi_num_args (stmt); i++)
	{
	  tree op = gimple_phi_arg_def (stmt, i);
	  if (TREE_CODE (op) == SSA_NAME)
	    bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
	}
      can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
						   mark_nonssa_use,
						   mark_nonssa_use,
						   mark_nonssa_use);
    }
  /* Record also uses coming from PHI operands in the return BB.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->dest == return_bb)
      {
	for (gphi_iterator bsi = gsi_start_phis (return_bb);
	     !gsi_end_p (bsi);
	     gsi_next (&bsi))
	  {
	    gphi *stmt = bsi.phi ();
	    tree op = gimple_phi_arg_def (stmt, e->dest_idx);

	    if (virtual_operand_p (gimple_phi_result (stmt)))
	      continue;
	    if (TREE_CODE (op) == SSA_NAME)
	      bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
	    else
	      can_split &= !mark_nonssa_use (stmt, op, op, non_ssa_vars);
	  }
      }
  return can_split;
}

/* Stack entry for recursive DFS walk in find_split_point.  */

typedef struct
{
  /* Basic block we are examining.  */
  basic_block bb;

  /* SSA names set and used by the BB and all BBs reachable
     from it via DFS walk.  */
  bitmap set_ssa_names, used_ssa_names;
  bitmap non_ssa_vars;

  /* All BBS visited from this BB via DFS walk.  */
  bitmap bbs_visited;

  /* Last examined edge in DFS walk.  Since we walk an unoriented graph,
     the value is up to the sum of incoming and outgoing edges of BB.  */
  unsigned int edge_num;

  /* Stack entry index of the earliest BB reachable from the current BB
     or any BB visited later in the DFS walk.  */
  int earliest;

  /* Overall time and size of all BBs reached from this BB in DFS walk.  */
  int overall_time, overall_size;

  /* When false we cannot split on this BB.  */
  bool can_split;
} stack_entry;

/* Find all articulations and call consider_split on them.
   OVERALL_TIME and OVERALL_SIZE are the time and size of the function.

   We perform the basic algorithm for finding an articulation in a graph
   created from the CFG by considering it to be an unoriented graph.

   The articulation is discovered via a DFS walk.  We collect the earliest
   basic block on the stack that is reachable via a backward edge.  An
   articulation is any basic block such that there is no backward edge
   bypassing it.  To reduce stack usage we maintain a heap allocated stack
   in the STACK vector.  The AUX pointer of a BB is set to the index at
   which it appears in the stack, or to -1 once it is visited and popped
   off the stack.

   The algorithm finds an articulation after visiting the whole component
   reachable from it.  This makes it convenient to collect information
   about the component used by consider_split.  */
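
/* An illustrative sketch (not code): in a CFG with edges

     ENTRY -> A,  A -> B,  A -> C,  B -> D,  C -> D,  D -> E

   no backward edge in the walk bypasses D, so D is an articulation of the
   unoriented graph; consider_split is then offered a candidate with
   entry_bb = D and the component {D, E} as the prospective split part.  */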

static void
find_split_points (basic_block return_bb, int overall_time, int overall_size)
{
  stack_entry first;
  vec<stack_entry> stack = vNULL;
  basic_block bb;
  struct split_point current;

  current.header_time = overall_time;
  current.header_size = overall_size;
  current.split_time = 0;
  current.split_size = 0;
  current.ssa_names_to_pass = BITMAP_ALLOC (NULL);

  first.bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  first.edge_num = 0;
  first.overall_time = 0;
  first.overall_size = 0;
  first.earliest = INT_MAX;
  first.set_ssa_names = 0;
  first.used_ssa_names = 0;
  first.non_ssa_vars = 0;
  first.bbs_visited = 0;
  first.can_split = false;
  stack.safe_push (first);
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = (void *)(intptr_t)-1;

  while (!stack.is_empty ())
    {
      stack_entry *entry = &stack.last ();

      /* We are walking an acyclic graph, so edge_num counts
	 succ and pred edges together.  However when considering
	 articulation, we want to have processed everything reachable
	 from articulation but nothing that reaches into it.  */
      if (entry->edge_num == EDGE_COUNT (entry->bb->succs)
	  && entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  int pos = stack.length ();
	  entry->can_split &= visit_bb (entry->bb, return_bb,
					entry->set_ssa_names,
					entry->used_ssa_names,
					entry->non_ssa_vars);
	  if (pos <= entry->earliest && !entry->can_split
	      && dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "found articulation at bb %i but can not split\n",
		     entry->bb->index);
	  if (pos <= entry->earliest && entry->can_split)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "found articulation at bb %i\n",
			 entry->bb->index);
	      current.entry_bb = entry->bb;
	      current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
	      bitmap_and_compl (current.ssa_names_to_pass,
				entry->used_ssa_names, entry->set_ssa_names);
	      current.header_time = overall_time - entry->overall_time;
	      current.header_size = overall_size - entry->overall_size;
	      current.split_time = entry->overall_time;
	      current.split_size = entry->overall_size;
	      current.split_bbs = entry->bbs_visited;
	      consider_split (&current, entry->non_ssa_vars, return_bb);
	      BITMAP_FREE (current.ssa_names_to_pass);
	    }
	}
      /* Do the actual DFS walk.  */
      if (entry->edge_num
	  < (EDGE_COUNT (entry->bb->succs)
	     + EDGE_COUNT (entry->bb->preds)))
	{
	  edge e;
	  basic_block dest;
	  if (entry->edge_num < EDGE_COUNT (entry->bb->succs))
	    {
	      e = EDGE_SUCC (entry->bb, entry->edge_num);
	      dest = e->dest;
	    }
	  else
	    {
	      e = EDGE_PRED (entry->bb, entry->edge_num
			     - EDGE_COUNT (entry->bb->succs));
	      dest = e->src;
	    }

	  entry->edge_num++;

	  /* New BB to visit, push it to the stack.  */
	  if (dest != return_bb && dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && !dest->aux)
	    {
	      stack_entry new_entry;

	      new_entry.bb = dest;
	      new_entry.edge_num = 0;
	      new_entry.overall_time
		 = bb_info_vec[dest->index].time;
	      new_entry.overall_size
		 = bb_info_vec[dest->index].size;
	      new_entry.earliest = INT_MAX;
	      new_entry.set_ssa_names = BITMAP_ALLOC (NULL);
	      new_entry.used_ssa_names = BITMAP_ALLOC (NULL);
	      new_entry.bbs_visited = BITMAP_ALLOC (NULL);
	      new_entry.non_ssa_vars = BITMAP_ALLOC (NULL);
	      new_entry.can_split = true;
	      bitmap_set_bit (new_entry.bbs_visited, dest->index);
	      stack.safe_push (new_entry);
	      dest->aux = (void *)(intptr_t)stack.length ();
	    }
	  /* Back edge found, record the earliest point.  */
	  else if ((intptr_t)dest->aux > 0
		   && (intptr_t)dest->aux < entry->earliest)
	    entry->earliest = (intptr_t)dest->aux;
	}
      /* We are done with examining the edges.  Pop off the value from the
	 stack and merge the stuff we accumulated during the walk.  */
      else if (entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  stack_entry *prev = &stack[stack.length () - 2];

	  entry->bb->aux = (void *)(intptr_t)-1;
	  prev->can_split &= entry->can_split;
	  if (prev->set_ssa_names)
	    {
	      bitmap_ior_into (prev->set_ssa_names, entry->set_ssa_names);
	      bitmap_ior_into (prev->used_ssa_names, entry->used_ssa_names);
	      bitmap_ior_into (prev->bbs_visited, entry->bbs_visited);
	      bitmap_ior_into (prev->non_ssa_vars, entry->non_ssa_vars);
	    }
	  if (prev->earliest > entry->earliest)
	    prev->earliest = entry->earliest;
	  prev->overall_time += entry->overall_time;
	  prev->overall_size += entry->overall_size;
	  BITMAP_FREE (entry->set_ssa_names);
	  BITMAP_FREE (entry->used_ssa_names);
	  BITMAP_FREE (entry->bbs_visited);
	  BITMAP_FREE (entry->non_ssa_vars);
	  stack.pop ();
	}
      else
	stack.pop ();
    }
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = NULL;
  FOR_EACH_BB_FN (bb, cfun)
    bb->aux = NULL;
  stack.release ();
  BITMAP_FREE (current.ssa_names_to_pass);
}

/* Split function at SPLIT_POINT.  */
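
/* An illustrative sketch of the result (assuming the split part returns a
   value): after splitting, the header ends in a block of the form

     retval_1 = func.part (a_2, b_3);

   followed by the (possibly shared) return block, while func.part is a new
   clone containing the blocks in SPLIT_POINT->split_bbs.  When the split
   part never returns, func.part is instead marked TREE_THIS_VOLATILE
   (noreturn) and no return edge is created.  */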

static void
split_function (basic_block return_bb, struct split_point *split_point,
		bool add_tsan_func_exit)
{
  vec<tree> args_to_pass = vNULL;
  bitmap args_to_skip;
  tree parm;
  int num = 0;
  cgraph_node *node, *cur_node = cgraph_node::get (current_function_decl);
  basic_block call_bb;
  gcall *call, *tsan_func_exit_call = NULL;
  edge e;
  edge_iterator ei;
  tree retval = NULL, real_retval = NULL, retbnd = NULL;
  bool split_part_return_p = false;
  bool with_bounds = chkp_function_instrumented_p (current_function_decl);
  gimple last_stmt = NULL;
  unsigned int i;
  tree arg, ddef;
  vec<tree, va_gc> **debug_args = NULL;

  if (dump_file)
    {
      fprintf (dump_file, "\n\nSplitting function at:\n");
      dump_split_point (dump_file, split_point);
    }

  if (cur_node->local.can_change_signature)
    args_to_skip = BITMAP_ALLOC (NULL);
  else
    args_to_skip = NULL;

  /* Collect the parameters of the new function and the args_to_skip
     bitmap.  */
  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm; parm = DECL_CHAIN (parm), num++)
    if (args_to_skip
	&& (!is_gimple_reg (parm)
	    || (ddef = ssa_default_def (cfun, parm)) == NULL_TREE
	    || !bitmap_bit_p (split_point->ssa_names_to_pass,
			      SSA_NAME_VERSION (ddef))))
      bitmap_set_bit (args_to_skip, num);
    else
      {
	/* This parm might not have been used up to now, but is going to be
	   used, hence register it.  */
	if (is_gimple_reg (parm))
	  arg = get_or_create_ssa_default_def (cfun, parm);
	else
	  arg = parm;

	if (!useless_type_conversion_p (DECL_ARG_TYPE (parm), TREE_TYPE (arg)))
	  arg = fold_convert (DECL_ARG_TYPE (parm), arg);
	args_to_pass.safe_push (arg);
      }

  /* See if the split function will return.  */
  FOR_EACH_EDGE (e, ei, return_bb->preds)
    if (bitmap_bit_p (split_point->split_bbs, e->src->index))
      break;
  if (e)
    split_part_return_p = true;

  /* Add the return block to what will become the split function.
     We do not return; no return block is needed.  */
  if (!split_part_return_p)
    ;
  /* We have no return block, so nothing is needed.  */
  else if (return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    ;
  /* When we do not want to return a value, we need to construct a
     new return block with an empty return statement.
     FIXME: Once we are able to change the return type, we should change
     the function to return void instead of just outputting the function
     with an undefined return value.  For structures this affects quality
     of codegen.  */
  else if (!split_point->split_part_set_retval
	   && find_retval (return_bb))
    {
      bool redirected = true;
      basic_block new_return_bb = create_basic_block (NULL, 0, return_bb);
      gimple_stmt_iterator gsi = gsi_start_bb (new_return_bb);
      gsi_insert_after (&gsi, gimple_build_return (NULL), GSI_NEW_STMT);
      while (redirected)
	{
	  redirected = false;
	  FOR_EACH_EDGE (e, ei, return_bb->preds)
	    if (bitmap_bit_p (split_point->split_bbs, e->src->index))
	      {
		new_return_bb->count += e->count;
		new_return_bb->frequency += EDGE_FREQUENCY (e);
		redirect_edge_and_branch (e, new_return_bb);
		redirected = true;
		break;
	      }
	}
      e = make_edge (new_return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
      e->probability = REG_BR_PROB_BASE;
      e->count = new_return_bb->count;
      add_bb_to_loop (new_return_bb, current_loops->tree_root);
      bitmap_set_bit (split_point->split_bbs, new_return_bb->index);
    }
  /* When we pass around the value, use the existing return block.  */
  else
    bitmap_set_bit (split_point->split_bbs, return_bb->index);

  /* If RETURN_BB has virtual operand PHIs, they must be removed and the
     virtual operand marked for renaming as we change the CFG in a way that
     tree-inline is not able to compensate for.

     Note this can happen whether or not we have a return value.  If we have
     a return value, then RETURN_BB may have PHIs for real operands too.  */
  if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      bool phi_p = false;
      for (gphi_iterator gsi = gsi_start_phis (return_bb);
	   !gsi_end_p (gsi);)
	{
	  gphi *stmt = gsi.phi ();
	  if (!virtual_operand_p (gimple_phi_result (stmt)))
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  mark_virtual_phi_result_for_renaming (stmt);
	  remove_phi_node (&gsi, true);
	  phi_p = true;
	}
      /* In reality we have to rename the reaching definition of the
	 virtual operand at return_bb as we will eventually release it
	 when we remove the code region we outlined.
	 So we have to rename all immediate virtual uses of that region
	 if we didn't see a PHI definition yet.  */
      /* ??? In real reality we want to set the reaching vdef of the
	 entry of the SESE region as the vuse of the call and the reaching
	 vdef of the exit of the SESE region as the vdef of the call.  */
      if (!phi_p)
	for (gimple_stmt_iterator gsi = gsi_start_bb (return_bb);
	     !gsi_end_p (gsi);
	     gsi_next (&gsi))
	  {
	    gimple stmt = gsi_stmt (gsi);
	    if (gimple_vuse (stmt))
	      {
		gimple_set_vuse (stmt, NULL_TREE);
		update_stmt (stmt);
	      }
	    if (gimple_vdef (stmt))
	      break;
	  }
    }

  /* Now create the actual clone.  */
  cgraph_edge::rebuild_edges ();
  node = cur_node->create_version_clone_with_body
    (vNULL, NULL, args_to_skip, !split_part_return_p, split_point->split_bbs,
     split_point->entry_bb, "part");

  node->split_part = true;

  /* Let's take a time profile for the split function.  */
  node->tp_first_run = cur_node->tp_first_run + 1;

  /* For usual cloning it is enough to clear the builtin flag only when the
     signature changes.  For partial inlining we however cannot expect a
     part of the builtin implementation to have the same semantics as the
     whole.  */
  if (DECL_BUILT_IN (node->decl))
    {
      DECL_BUILT_IN_CLASS (node->decl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (node->decl) = (enum built_in_function) 0;
    }

  /* If the original function is instrumented, then its
     part is also instrumented.  */
  if (with_bounds)
    chkp_function_mark_instrumented (node->decl);

  /* If the original function is declared inline, there is no point in
     issuing a warning for the non-inlinable part.  */
  DECL_NO_INLINE_WARNING_P (node->decl) = 1;
  cur_node->remove_callees ();
  cur_node->remove_all_references ();
  if (!split_part_return_p)
    TREE_THIS_VOLATILE (node->decl) = 1;
  if (dump_file)
    dump_function_to_file (node->decl, dump_file, dump_flags);

  /* Create the basic block we place the call into.  It is the entry basic
     block split after the last label.  */
  call_bb = split_point->entry_bb;
  for (gimple_stmt_iterator gsi = gsi_start_bb (call_bb); !gsi_end_p (gsi);)
    if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
      {
	last_stmt = gsi_stmt (gsi);
	gsi_next (&gsi);
      }
    else
      break;
  e = split_block (split_point->entry_bb, last_stmt);
  remove_edge (e);

  /* Produce the call statement.  */
  gimple_stmt_iterator gsi = gsi_last_bb (call_bb);
  FOR_EACH_VEC_ELT (args_to_pass, i, arg)
    if (!is_gimple_val (arg))
      {
	arg = force_gimple_operand_gsi (&gsi, arg, true, NULL_TREE,
					false, GSI_CONTINUE_LINKING);
	args_to_pass[i] = arg;
      }
  call = gimple_build_call_vec (node->decl, args_to_pass);
  gimple_call_set_with_bounds (call, with_bounds);
  gimple_set_block (call, DECL_INITIAL (current_function_decl));
  args_to_pass.release ();

  /* For optimized away parameters, add on the caller side
       DEBUG D#X => parm_Y(D)
     stmts and associate D#X with parm in the decl_debug_args_lookup
     vector to say for debug info that if parameter parm had been passed,
     it would have value parm_Y(D).  */
  if (args_to_skip)
    for (parm = DECL_ARGUMENTS (current_function_decl), num = 0;
	 parm; parm = DECL_CHAIN (parm), num++)
      if (bitmap_bit_p (args_to_skip, num)
	  && is_gimple_reg (parm))
	{
	  tree ddecl;
	  gimple def_temp;

	  /* This needs to be done even without MAY_HAVE_DEBUG_STMTS,
	     otherwise if it didn't exist before, we'd end up with
	     different SSA_NAME_VERSIONs between -g and -g0.  */
	  arg = get_or_create_ssa_default_def (cfun, parm);
	  if (!MAY_HAVE_DEBUG_STMTS)
	    continue;

	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (node->decl);
	  ddecl = make_node (DEBUG_EXPR_DECL);
	  DECL_ARTIFICIAL (ddecl) = 1;
	  TREE_TYPE (ddecl) = TREE_TYPE (parm);
	  DECL_MODE (ddecl) = DECL_MODE (parm);
	  vec_safe_push (*debug_args, DECL_ORIGIN (parm));
	  vec_safe_push (*debug_args, ddecl);
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
					      call);
	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
	}

  /* And on the callee side, add
       DEBUG D#Y s=> parm
       DEBUG var => D#Y
     stmts to the first bb where var is a VAR_DECL created for the
     optimized away parameter in DECL_INITIAL block.  This hints
     in the debug info that var (whose DECL_ORIGIN is the parm PARM_DECL)
     is optimized away, but can be looked up at the call site
     as the value of D#X there.  */
  if (debug_args != NULL)
    {
      unsigned int i;
      tree var, vexpr;
      gimple_stmt_iterator cgsi;
      gimple def_temp;

      push_cfun (DECL_STRUCT_FUNCTION (node->decl));
      var = BLOCK_VARS (DECL_INITIAL (node->decl));
      i = vec_safe_length (*debug_args);
      cgsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
      do
	{
	  i -= 2;
	  while (var != NULL_TREE
		 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
	    var = TREE_CHAIN (var);
	  if (var == NULL_TREE)
	    break;
	  vexpr = make_node (DEBUG_EXPR_DECL);
	  parm = (**debug_args)[i];
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (parm);
	  DECL_MODE (vexpr) = DECL_MODE (parm);
	  def_temp = gimple_build_debug_source_bind (vexpr, parm,
						     NULL);
	  gsi_insert_before (&cgsi, def_temp, GSI_SAME_STMT);
	  def_temp = gimple_build_debug_bind (var, vexpr, NULL);
	  gsi_insert_before (&cgsi, def_temp, GSI_SAME_STMT);
	}
      while (i);
      pop_cfun ();
    }

  /* We avoid the address being taken on any variable used by the split
     part, so return slot optimization is always possible.  Moreover this is
     required to make DECL_BY_REFERENCE work.  */
  if (aggregate_value_p (DECL_RESULT (current_function_decl),
			 TREE_TYPE (current_function_decl))
      && (!is_gimple_reg_type (TREE_TYPE (DECL_RESULT (current_function_decl)))
	  || DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))))
    gimple_call_set_return_slot_opt (call, true);

  if (add_tsan_func_exit)
    tsan_func_exit_call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);

  /* Update the return value.  This is a bit tricky.  When we do not return,
     do nothing.  When we return, we might need to update return_bb
     or produce a new return statement.  */
  if (!split_part_return_p)
    {
      gsi_insert_after (&gsi, call, GSI_NEW_STMT);
      if (tsan_func_exit_call)
	gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
    }
  else
    {
      e = make_edge (call_bb, return_bb,
		     return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
		     ? 0 : EDGE_FALLTHRU);
      e->count = call_bb->count;
      e->probability = REG_BR_PROB_BASE;

      /* If there is a return basic block, see what value we need to store
	 the return value into and put the call just before it.  */
      if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  real_retval = retval = find_retval (return_bb);
	  retbnd = find_retbnd (return_bb);

	  if (real_retval && split_point->split_part_set_retval)
	    {
	      gphi_iterator psi;

	      /* See if we need a new SSA_NAME for the result.
		 When DECL_BY_REFERENCE is true, retval is actually a
		 pointer to the return value and it is constant in the
		 whole function.  */
	      if (TREE_CODE (retval) == SSA_NAME
		  && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
		{
		  retval = copy_ssa_name (retval, call);

		  /* See if there is a PHI defining the return value.  */
		  for (psi = gsi_start_phis (return_bb);
		       !gsi_end_p (psi); gsi_next (&psi))
		    if (!virtual_operand_p (gimple_phi_result (psi.phi ())))
		      break;

		  /* When there is a PHI, just update its value.  */
		  if (TREE_CODE (retval) == SSA_NAME
		      && !gsi_end_p (psi))
		    add_phi_arg (psi.phi (), retval, e, UNKNOWN_LOCATION);
		  /* Otherwise update the return BB itself.
		     find_return_bb allows at most one assignment to the
		     return value, so update the first statement.  */
		  else
		    {
		      gimple_stmt_iterator bsi;
		      for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
			   gsi_next (&bsi))
			if (greturn *return_stmt
			      = dyn_cast <greturn *> (gsi_stmt (bsi)))
			  {
			    gimple_return_set_retval (return_stmt, retval);
			    break;
			  }
			else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
				 && !gimple_clobber_p (gsi_stmt (bsi)))
			  {
			    gimple_assign_set_rhs1 (gsi_stmt (bsi), retval);
			    break;
			  }
		      update_stmt (gsi_stmt (bsi));
		    }

		  /* Replace retbnd with a new one.  */
		  if (retbnd)
		    {
		      gimple_stmt_iterator bsi;
		      for (bsi = gsi_last_bb (return_bb); !gsi_end_p (bsi);
			   gsi_prev (&bsi))
			if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
			  {
			    retbnd = copy_ssa_name (retbnd, call);
			    gimple_return_set_retbnd (gsi_stmt (bsi), retbnd);
			    update_stmt (gsi_stmt (bsi));
			    break;
			  }
		    }
		}
	      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
		{
		  gimple_call_set_lhs (call, build_simple_mem_ref (retval));
		  gsi_insert_after (&gsi, call, GSI_NEW_STMT);
		}
	      else
		{
		  tree restype;
		  restype = TREE_TYPE (DECL_RESULT (current_function_decl));
		  gsi_insert_after (&gsi, call, GSI_NEW_STMT);
		  if (!useless_type_conversion_p (TREE_TYPE (retval), restype))
		    {
		      gimple cpy;
		      tree tem = create_tmp_reg (restype);
		      tem = make_ssa_name (tem, call);
		      cpy = gimple_build_assign (retval, NOP_EXPR, tem);
		      gsi_insert_after (&gsi, cpy, GSI_NEW_STMT);
		      retval = tem;
		    }
		  /* Build a bndret call to obtain the returned bounds.  */
		  if (retbnd)
		    chkp_insert_retbnd_call (retbnd, retval, &gsi);
		  gimple_call_set_lhs (call, retval);
		  update_stmt (call);
		}
	    }
	  else
	    gsi_insert_after (&gsi, call, GSI_NEW_STMT);
	  if (tsan_func_exit_call)
	    gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
	}
      /* We don't use the return block (there is either no return in the
	 function or multiple of them).  So create a new basic block with a
	 return statement.  */
      else
	{
	  greturn *ret;
	  if (split_point->split_part_set_retval
	      && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
	    {
	      retval = DECL_RESULT (current_function_decl);

	      if (chkp_function_instrumented_p (current_function_decl)
		  && BOUNDED_P (retval))
		retbnd = create_tmp_reg (pointer_bounds_type_node);

	      /* We use a temporary register to hold the value when
		 aggregate_value_p is false.  Similarly for DECL_BY_REFERENCE
		 we must avoid an extra copy.  */
	      if (!aggregate_value_p (retval, TREE_TYPE (current_function_decl))
		  && !DECL_BY_REFERENCE (retval))
		retval = create_tmp_reg (TREE_TYPE (retval));
	      if (is_gimple_reg (retval))
		{
		  /* When returning by reference, there is only one SSA name
		     assigned to RESULT_DECL (that is the pointer to the
		     return value).  Look it up or create a new one if it is
		     missing.  */
		  if (DECL_BY_REFERENCE (retval))
		    retval = get_or_create_ssa_default_def (cfun, retval);
		  /* Otherwise produce a new SSA name for the return
		     value.  */
		  else
		    retval = make_ssa_name (retval, call);
		}
	      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
		gimple_call_set_lhs (call, build_simple_mem_ref (retval));
	      else
		gimple_call_set_lhs (call, retval);
	    }
	  gsi_insert_after (&gsi, call, GSI_NEW_STMT);
	  /* Build a bndret call to obtain the returned bounds.  */
	  if (retbnd)
	    chkp_insert_retbnd_call (retbnd, retval, &gsi);
	  if (tsan_func_exit_call)
	    gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
	  ret = gimple_build_return (retval);
	  gsi_insert_after (&gsi, ret, GSI_NEW_STMT);
	}
    }
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  compute_inline_parameters (node, true);
}

/* Execute function splitting pass.  */

static unsigned int
execute_split_functions (void)
{
  gimple_stmt_iterator bsi;
  basic_block bb;
  int overall_time = 0, overall_size = 0;
  int todo = 0;
  struct cgraph_node *node = cgraph_node::get (current_function_decl);

  if (flags_from_decl_or_type (current_function_decl)
      & (ECF_NORETURN|ECF_MALLOC))
    {
      if (dump_file)
	fprintf (dump_file, "Not splitting: noreturn/malloc function.\n");
      return 0;
    }
  if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
    {
      if (dump_file)
	fprintf (dump_file, "Not splitting: main function.\n");
      return 0;
    }
  /* This can be relaxed; the function might become inlinable after
     splitting away the uninlinable part.  */
  if (inline_edge_summary_vec.exists ()
      && !inline_summaries->get (node)->inlinable)
    {
      if (dump_file)
	fprintf (dump_file, "Not splitting: not inlinable.\n");
      return 0;
    }
  if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
    {
      if (dump_file)
	fprintf (dump_file, "Not splitting: disregarding inline limits.\n");
      return 0;
    }
  /* This can be relaxed; most of the versioning tests actually prevent
     a duplication.  */
  if (!tree_versionable_function_p (current_function_decl))
    {
      if (dump_file)
	fprintf (dump_file, "Not splitting: not versionable.\n");
      return 0;
    }
  /* FIXME: we could support this.  */
  if (DECL_STRUCT_FUNCTION (current_function_decl)->static_chain_decl)
    {
      if (dump_file)
	fprintf (dump_file, "Not splitting: nested function.\n");
      return 0;
    }

  /* See if it makes sense to try to split.
     It makes sense to split if we inline, that is if we have direct calls
     to handle or direct calls are possibly going to appear as a result of
     indirect inlining or LTO.  Also handle -fprofile-generate as LTO to
     allow non-LTO training for an LTO -fprofile-use build.

     Note that we are not completely conservative about disqualifying
     functions called once.  It is possible that the caller is called more
     than once and then inlining would still benefit.  */
  if ((!node->callers
       /* Local functions called once will be completely inlined most of
	  the time.  */
       || (!node->callers->next_caller && node->local.local))
      && !node->address_taken
      && !node->has_aliases_p ()
      && (!flag_lto || !node->externally_visible))
    {
      if (dump_file)
	fprintf (dump_file, "Not splitting: not called directly "
		 "or called once.\n");
      return 0;
    }

  /* FIXME: We can actually split if splitting reduces call overhead.  */
  if (!flag_inline_small_functions
      && !DECL_DECLARED_INLINE_P (current_function_decl))
    {
      if (dump_file)
	fprintf (dump_file, "Not splitting: not autoinlining and function"
		 " is not inline.\n");
      return 0;
    }

  /* We enforce splitting after loop headers when profile info is not
     available.  */
  if (profile_status_for_fn (cfun) != PROFILE_READ)
    mark_dfs_back_edges ();

  /* Initialize bitmap to track forbidden calls.  */
  forbidden_dominators = BITMAP_ALLOC (NULL);
  calculate_dominance_info (CDI_DOMINATORS);

  /* Compute local info about basic blocks and determine function
     size/time.  */
  bb_info_vec.safe_grow_cleared (last_basic_block_for_fn (cfun) + 1);
  memset (&best_split_point, 0, sizeof (best_split_point));
  basic_block return_bb = find_return_bb ();
  int tsan_exit_found = -1;
  FOR_EACH_BB_FN (bb, cfun)
    {
      int time = 0;
      int size = 0;
      int freq = compute_call_stmt_bb_frequency (current_function_decl, bb);

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Basic block %i\n", bb->index);

      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  int this_time, this_size;
	  gimple stmt = gsi_stmt (bsi);

	  this_size = estimate_num_insns (stmt, &eni_size_weights);
	  this_time = estimate_num_insns (stmt, &eni_time_weights) * freq;
	  size += this_size;
	  time += this_time;
	  check_forbidden_calls (stmt);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  freq:%6i size:%3i time:%3i ",
		       freq, this_size, this_time);
	      print_gimple_stmt (dump_file, stmt, 0, 0);
	    }

	  if ((flag_sanitize & SANITIZE_THREAD)
	      && is_gimple_call (stmt)
	      && gimple_call_internal_p (stmt)
	      && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
	    {
	      /* We handle TSAN_FUNC_EXIT for splitting either in the
		 return_bb, or in its immediate predecessors.  */
	      if ((bb != return_bb && !find_edge (bb, return_bb))
		  || (tsan_exit_found != -1
		      && tsan_exit_found != (bb != return_bb)))
		{
		  if (dump_file)
		    fprintf (dump_file, "Not splitting: TSAN_FUNC_EXIT"
			     " in unexpected basic block.\n");
		  BITMAP_FREE (forbidden_dominators);
		  bb_info_vec.release ();
		  return 0;
		}
	      tsan_exit_found = bb != return_bb;
	    }
	}
      overall_time += time;
      overall_size += size;
      bb_info_vec[bb->index].time = time;
      bb_info_vec[bb->index].size = size;
    }
  find_split_points (return_bb, overall_time, overall_size);
  if (best_split_point.split_bbs)
    {
      split_function (return_bb, &best_split_point, tsan_exit_found == 1);
      BITMAP_FREE (best_split_point.ssa_names_to_pass);
      BITMAP_FREE (best_split_point.split_bbs);
      todo = TODO_update_ssa | TODO_cleanup_cfg;
    }
  BITMAP_FREE (forbidden_dominators);
  bb_info_vec.release ();
  return todo;
}

namespace {

const pass_data pass_data_split_functions =
{
  GIMPLE_PASS, /* type */
  "fnsplit", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FNSPLIT, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_functions : public gimple_opt_pass
{
public:
  pass_split_functions (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_functions, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return execute_split_functions ();
    }

}; // class pass_split_functions

bool
pass_split_functions::gate (function *)
{
  /* When doing profile feedback, we want to execute the pass after the
     profile is read.  So disable this instance in early optimizations.  */
  return (flag_partial_inlining
	  && !profile_arc_flag && !flag_branch_probabilities);
}

} // anon namespace

gimple_opt_pass *
make_pass_split_functions (gcc::context *ctxt)
{
  return new pass_split_functions (ctxt);
}

/* Execute the function splitting pass (feedback-driven variant).  */

static unsigned int
execute_feedback_split_functions (void)
{
  unsigned int retval = execute_split_functions ();
  if (retval)
    retval |= TODO_rebuild_cgraph_edges;
  return retval;
}

namespace {

const pass_data pass_data_feedback_split_functions =
{
  GIMPLE_PASS, /* type */
  "feedback_fnsplit", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FNSPLIT, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_feedback_split_functions : public gimple_opt_pass
{
public:
  pass_feedback_split_functions (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_feedback_split_functions, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return execute_feedback_split_functions ();
    }

}; // class pass_feedback_split_functions

bool
pass_feedback_split_functions::gate (function *)
{
  /* We don't need to split when profiling; we are producing
     lousy code anyway.  */
  return (flag_partial_inlining
	  && flag_branch_probabilities);
}

} // anon namespace

gimple_opt_pass *
make_pass_feedback_split_functions (gcc::context *ctxt)
{
  return new pass_feedback_split_functions (ctxt);
}