1 /* Function splitting pass
2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka <jh@suse.cz>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* The purpose of this pass is to split function bodies to improve
22 inlining. I.e. for function of the form:
24 func (...)
26 if (cheap_test)
27 something_small
28 else
29 something_big
32 Produce:
34 func.part (...)
36 something_big
39 func (...)
41 if (cheap_test)
42 something_small
43 else
44 func.part (...);
47 When func becomes inlinable and when cheap_test is often true, inlining func,
48 but not func.part, leads to a performance improvement similar to inlining the
49 original func while the code size growth is smaller.
51 The pass is organized in three stages:
52 1) Collect local info about basic block into BB_INFO structure and
53 compute function body estimated size and time.
54 2) Via DFS walk find all possible basic blocks where we can split
55 and choose the best one.
56 3) If split point is found, split at the specified BB by creating a clone
57 and updating function to call it.
59 The decisions about which functions to split are made in
60 execute_split_functions and consider_split.
62 There are several possible future improvements for this pass including:
64 1) Splitting to break up large functions
65 2) Splitting to reduce stack frame usage
66 3) Allow split part of function to use values computed in the header part.
67 The values need to be passed to the split function, perhaps via the same
68 interface as for nested functions or as arguments.
69 4) Support for simple rematerialization. I.e. when the split part uses a
70 value computed in the header from a function parameter in a very cheap way,
71 we can just recompute it.
72 5) Support splitting of nested functions.
73 6) Support non-SSA arguments.
74 7) There is nothing preventing us from producing multiple parts of a single
75 function when needed or splitting also the parts. */
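/* Illustrative sketch only (the function and field names below are
   invented, not taken from this pass): a typical profitable split
   candidate looks like

     int
     lookup (struct htab *t, int key)
     {
       if (t->last_key == key)       <- cheap_test
	 return t->last_val;         <- something_small
       return slow_lookup (t, key);  <- something_big
     }

   The cold path is outlined into a clone named with a "part" suffix
   (see split_function below), and the remaining hot wrapper becomes
   cheap enough to inline at every call site. */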
77 #include "config.h"
78 #include "system.h"
79 #include "coretypes.h"
80 #include "backend.h"
81 #include "rtl.h"
82 #include "tree.h"
83 #include "gimple.h"
84 #include "cfghooks.h"
85 #include "alloc-pool.h"
86 #include "tree-pass.h"
87 #include "ssa.h"
88 #include "cgraph.h"
89 #include "diagnostic.h"
90 #include "fold-const.h"
91 #include "cfganal.h"
92 #include "calls.h"
93 #include "gimplify.h"
94 #include "gimple-iterator.h"
95 #include "gimplify-me.h"
96 #include "gimple-walk.h"
97 #include "symbol-summary.h"
98 #include "ipa-prop.h"
99 #include "tree-cfg.h"
100 #include "tree-into-ssa.h"
101 #include "tree-dfa.h"
102 #include "tree-inline.h"
103 #include "params.h"
104 #include "gimple-pretty-print.h"
105 #include "ipa-inline.h"
106 #include "cfgloop.h"
107 #include "tree-chkp.h"
109 /* Per basic block info. */
111 struct split_bb_info
113 unsigned int size;
114 unsigned int time;
117 static vec<split_bb_info> bb_info_vec;
119 /* Description of split point. */
121 struct split_point
123 /* Size of the partitions. */
124 unsigned int header_time, header_size, split_time, split_size;
126 /* SSA names that need to be passed into split function. */
127 bitmap ssa_names_to_pass;
129 /* Basic block where we split (that will become entry point of new function). */
130 basic_block entry_bb;
132 /* Basic blocks we are splitting away. */
133 bitmap split_bbs;
135 /* True when return value is computed on split part and thus it needs
136 to be returned. */
137 bool split_part_set_retval;
140 /* Best split point found. */
142 struct split_point best_split_point;
144 /* Set of basic blocks that are not allowed to dominate a split point. */
146 static bitmap forbidden_dominators;
148 static tree find_retval (basic_block return_bb);
149 static tree find_retbnd (basic_block return_bb);
151 /* Callback for walk_stmt_load_store_addr_ops. If T is a non-SSA automatic
152 variable, check whether it is present in the bitmap passed via DATA. */
154 static bool
155 test_nonssa_use (gimple *, tree t, tree, void *data)
157 t = get_base_address (t);
159 if (!t || is_gimple_reg (t))
160 return false;
162 if (TREE_CODE (t) == PARM_DECL
163 || (TREE_CODE (t) == VAR_DECL
164 && auto_var_in_fn_p (t, current_function_decl))
165 || TREE_CODE (t) == RESULT_DECL
166 /* Normal labels are part of CFG and will be handled gracefully.
167 Forced labels however can be used directly by statements and
168 need to stay in one partition along with their uses. */
169 || (TREE_CODE (t) == LABEL_DECL
170 && FORCED_LABEL (t)))
171 return bitmap_bit_p ((bitmap)data, DECL_UID (t));
173 /* For DECL_BY_REFERENCE, the return value is actually a pointer. We want
174 to pretend that the value pointed to is actual result decl. */
175 if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
176 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
177 && SSA_NAME_VAR (TREE_OPERAND (t, 0))
178 && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
179 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
180 return
181 bitmap_bit_p ((bitmap)data,
182 DECL_UID (DECL_RESULT (current_function_decl)));
184 return false;
187 /* Dump split point CURRENT. */
189 static void
190 dump_split_point (FILE * file, struct split_point *current)
192 fprintf (file,
193 "Split point at BB %i\n"
194 " header time: %i header size: %i\n"
195 " split time: %i split size: %i\n bbs: ",
196 current->entry_bb->index, current->header_time,
197 current->header_size, current->split_time, current->split_size);
198 dump_bitmap (file, current->split_bbs);
199 fprintf (file, " SSA names to pass: ");
200 dump_bitmap (file, current->ssa_names_to_pass);
203 /* Look for all BBs in header that might lead to the split part and verify
204 that they are not defining any non-SSA var used by the split part.
205 Parameters are the same as for consider_split. */
207 static bool
208 verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
209 basic_block return_bb)
211 bitmap seen = BITMAP_ALLOC (NULL);
212 vec<basic_block> worklist = vNULL;
213 edge e;
214 edge_iterator ei;
215 bool ok = true;
216 basic_block bb;
218 FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
219 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
220 && !bitmap_bit_p (current->split_bbs, e->src->index))
222 worklist.safe_push (e->src);
223 bitmap_set_bit (seen, e->src->index);
226 while (!worklist.is_empty ())
228 bb = worklist.pop ();
229 FOR_EACH_EDGE (e, ei, bb->preds)
230 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
231 && bitmap_set_bit (seen, e->src->index))
233 gcc_checking_assert (!bitmap_bit_p (current->split_bbs,
234 e->src->index));
235 worklist.safe_push (e->src);
237 for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
238 gsi_next (&bsi))
240 gimple *stmt = gsi_stmt (bsi);
241 if (is_gimple_debug (stmt))
242 continue;
243 if (walk_stmt_load_store_addr_ops
244 (stmt, non_ssa_vars, test_nonssa_use, test_nonssa_use,
245 test_nonssa_use))
247 ok = false;
248 goto done;
250 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
251 if (test_nonssa_use (stmt, gimple_label_label (label_stmt),
252 NULL_TREE, non_ssa_vars))
254 ok = false;
255 goto done;
258 for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
259 gsi_next (&bsi))
261 if (walk_stmt_load_store_addr_ops
262 (gsi_stmt (bsi), non_ssa_vars, test_nonssa_use, test_nonssa_use,
263 test_nonssa_use))
265 ok = false;
266 goto done;
269 FOR_EACH_EDGE (e, ei, bb->succs)
271 if (e->dest != return_bb)
272 continue;
273 for (gphi_iterator bsi = gsi_start_phis (return_bb);
274 !gsi_end_p (bsi);
275 gsi_next (&bsi))
277 gphi *stmt = bsi.phi ();
278 tree op = gimple_phi_arg_def (stmt, e->dest_idx);
280 if (virtual_operand_p (gimple_phi_result (stmt)))
281 continue;
282 if (TREE_CODE (op) != SSA_NAME
283 && test_nonssa_use (stmt, op, op, non_ssa_vars))
285 ok = false;
286 goto done;
292 /* Verify that the rest of function does not define any label
293 used by the split part. */
294 FOR_EACH_BB_FN (bb, cfun)
295 if (!bitmap_bit_p (current->split_bbs, bb->index)
296 && !bitmap_bit_p (seen, bb->index))
298 gimple_stmt_iterator bsi;
299 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
300 if (glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (bsi)))
302 if (test_nonssa_use (label_stmt,
303 gimple_label_label (label_stmt),
304 NULL_TREE, non_ssa_vars))
306 ok = false;
307 goto done;
310 else
311 break;
314 done:
315 BITMAP_FREE (seen);
316 worklist.release ();
317 return ok;
320 /* If STMT is a call, check the callee against a list of forbidden
321 predicate functions. If a match is found, look for uses of the
322 call result in condition statements that compare against zero.
323 For each such use, find the block targeted by the condition
324 statement for the nonzero result, and set the bit for this block
325 in the forbidden dominators bitmap. The purpose of this is to avoid
326 selecting a split point where we are likely to lose the chance
327 to optimize away an unused function call. */
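/* A minimal illustration (hypothetical user code, not from this
   file): after inlining, __builtin_constant_p often folds to 1 and
   the guarded call disappears.

     if (__builtin_constant_p (n))
       use_constant_path (n);
     else
       use_generic_path (n);

   The block reached on the nonzero result is recorded as a forbidden
   dominator, so consider_split rejects split points dominated by it
   and the fold-away opportunity is preserved (see PR49642 below). */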
329 static void
330 check_forbidden_calls (gimple *stmt)
332 imm_use_iterator use_iter;
333 use_operand_p use_p;
334 tree lhs;
336 /* At the moment, __builtin_constant_p is the only forbidden
337 predicate function call (see PR49642). */
338 if (!gimple_call_builtin_p (stmt, BUILT_IN_CONSTANT_P))
339 return;
341 lhs = gimple_call_lhs (stmt);
343 if (!lhs || TREE_CODE (lhs) != SSA_NAME)
344 return;
346 FOR_EACH_IMM_USE_FAST (use_p, use_iter, lhs)
348 tree op1;
349 basic_block use_bb, forbidden_bb;
350 enum tree_code code;
351 edge true_edge, false_edge;
352 gcond *use_stmt;
354 use_stmt = dyn_cast <gcond *> (USE_STMT (use_p));
355 if (!use_stmt)
356 continue;
358 /* Assuming canonical form for GIMPLE_COND here, with constant
359 in second position. */
360 op1 = gimple_cond_rhs (use_stmt);
361 code = gimple_cond_code (use_stmt);
362 use_bb = gimple_bb (use_stmt);
364 extract_true_false_edges_from_block (use_bb, &true_edge, &false_edge);
366 /* We're only interested in comparisons that distinguish
367 unambiguously from zero. */
368 if (!integer_zerop (op1) || code == LE_EXPR || code == GE_EXPR)
369 continue;
371 if (code == EQ_EXPR)
372 forbidden_bb = false_edge->dest;
373 else
374 forbidden_bb = true_edge->dest;
376 bitmap_set_bit (forbidden_dominators, forbidden_bb->index);
380 /* If BB is dominated by any block in the forbidden dominators set,
381 return TRUE; else FALSE. */
383 static bool
384 dominated_by_forbidden (basic_block bb)
386 unsigned dom_bb;
387 bitmap_iterator bi;
389 EXECUTE_IF_SET_IN_BITMAP (forbidden_dominators, 1, dom_bb, bi)
391 if (dominated_by_p (CDI_DOMINATORS, bb,
392 BASIC_BLOCK_FOR_FN (cfun, dom_bb)))
393 return true;
396 return false;
399 /* For a given split point CURRENT and return block RETURN_BB return true
400 if ssa name VAL is set by split part and false otherwise. */
401 static bool
402 split_part_set_ssa_name_p (tree val, struct split_point *current,
403 basic_block return_bb)
405 if (TREE_CODE (val) != SSA_NAME)
406 return false;
408 return (!SSA_NAME_IS_DEFAULT_DEF (val)
409 && (bitmap_bit_p (current->split_bbs,
410 gimple_bb (SSA_NAME_DEF_STMT (val))->index)
411 || gimple_bb (SSA_NAME_DEF_STMT (val)) == return_bb));
414 /* We found a split_point CURRENT. NON_SSA_VARS is a bitmap of all non-SSA
415 variables used and RETURN_BB is return basic block.
416 See if we can split function here. */
418 static void
419 consider_split (struct split_point *current, bitmap non_ssa_vars,
420 basic_block return_bb)
422 tree parm;
423 unsigned int num_args = 0;
424 unsigned int call_overhead;
425 edge e;
426 edge_iterator ei;
427 gphi_iterator bsi;
428 unsigned int i;
429 int incoming_freq = 0;
430 tree retval;
431 tree retbnd;
432 bool back_edge = false;
434 if (dump_file && (dump_flags & TDF_DETAILS))
435 dump_split_point (dump_file, current);
437 FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
439 if (e->flags & EDGE_DFS_BACK)
440 back_edge = true;
441 if (!bitmap_bit_p (current->split_bbs, e->src->index))
442 incoming_freq += EDGE_FREQUENCY (e);
445 /* Do not split when we would end up calling function anyway. */
446 if (incoming_freq
447 >= (ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
448 * PARAM_VALUE (PARAM_PARTIAL_INLINING_ENTRY_PROBABILITY) / 100))
450 /* When profile is guessed, we can not expect it to give us a
451 realistic estimate of the likelihood of the function taking the
452 complex path. As a special case, when the tail of the function is
453 a loop, enable splitting since inlining code skipping the loop
454 is likely a noticeable win. */
455 if (back_edge
456 && profile_status_for_fn (cfun) != PROFILE_READ
457 && incoming_freq < ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency)
459 if (dump_file && (dump_flags & TDF_DETAILS))
460 fprintf (dump_file,
461 " Split before loop, accepting despite low frequencies %i %i.\n",
462 incoming_freq,
463 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency);
465 else
467 if (dump_file && (dump_flags & TDF_DETAILS))
468 fprintf (dump_file,
469 " Refused: incoming frequency is too large.\n");
470 return;
474 if (!current->header_size)
476 if (dump_file && (dump_flags & TDF_DETAILS))
477 fprintf (dump_file, " Refused: header empty\n");
478 return;
481 /* Verify that PHI args on entry are either virtual or all their operands
482 incoming from header are the same. */
483 for (bsi = gsi_start_phis (current->entry_bb); !gsi_end_p (bsi); gsi_next (&bsi))
485 gphi *stmt = bsi.phi ();
486 tree val = NULL;
488 if (virtual_operand_p (gimple_phi_result (stmt)))
489 continue;
490 for (i = 0; i < gimple_phi_num_args (stmt); i++)
492 edge e = gimple_phi_arg_edge (stmt, i);
493 if (!bitmap_bit_p (current->split_bbs, e->src->index))
495 tree edge_val = gimple_phi_arg_def (stmt, i);
496 if (val && edge_val != val)
498 if (dump_file && (dump_flags & TDF_DETAILS))
499 fprintf (dump_file,
500 " Refused: entry BB has PHI with multiple variants\n");
501 return;
503 val = edge_val;
509 /* See what argument we will pass to the split function and compute
510 call overhead. */
511 call_overhead = eni_size_weights.call_cost;
512 for (parm = DECL_ARGUMENTS (current_function_decl); parm;
513 parm = DECL_CHAIN (parm))
515 if (!is_gimple_reg (parm))
517 if (bitmap_bit_p (non_ssa_vars, DECL_UID (parm)))
519 if (dump_file && (dump_flags & TDF_DETAILS))
520 fprintf (dump_file,
521 " Refused: need to pass non-ssa param values\n");
522 return;
525 else
527 tree ddef = ssa_default_def (cfun, parm);
528 if (ddef
529 && bitmap_bit_p (current->ssa_names_to_pass,
530 SSA_NAME_VERSION (ddef)))
532 if (!VOID_TYPE_P (TREE_TYPE (parm)))
533 call_overhead += estimate_move_cost (TREE_TYPE (parm), false);
534 num_args++;
538 if (!VOID_TYPE_P (TREE_TYPE (current_function_decl)))
539 call_overhead += estimate_move_cost (TREE_TYPE (current_function_decl),
540 false);
542 if (current->split_size <= call_overhead)
544 if (dump_file && (dump_flags & TDF_DETAILS))
545 fprintf (dump_file,
546 " Refused: split size is smaller than call overhead\n");
547 return;
549 if (current->header_size + call_overhead
550 >= (unsigned int)(DECL_DECLARED_INLINE_P (current_function_decl)
551 ? MAX_INLINE_INSNS_SINGLE
552 : MAX_INLINE_INSNS_AUTO))
554 if (dump_file && (dump_flags & TDF_DETAILS))
555 fprintf (dump_file,
556 " Refused: header size is too large for inline candidate\n");
557 return;
560 /* Splitting functions brings the target out of comdat group; this will
561 lead to code duplication if the function is reused by other unit.
562 Limit this duplication. This is consistent with limit in tree-sra.c
563 FIXME: with LTO we ought to be able to do better! */
564 if (DECL_ONE_ONLY (current_function_decl)
565 && current->split_size >= (unsigned int) MAX_INLINE_INSNS_AUTO)
567 if (dump_file && (dump_flags & TDF_DETAILS))
568 fprintf (dump_file,
569 " Refused: function is COMDAT and tail is too large\n");
570 return;
572 /* For comdat functions also reject very small tails; those will likely get
573 inlined back and we do not want to risk the duplication overhead.
574 FIXME: with LTO we ought to be able to do better! */
575 if (DECL_ONE_ONLY (current_function_decl)
576 && current->split_size
577 <= (unsigned int) PARAM_VALUE (PARAM_EARLY_INLINING_INSNS) / 2)
579 if (dump_file && (dump_flags & TDF_DETAILS))
580 fprintf (dump_file,
581 " Refused: function is COMDAT and tail is too small\n");
582 return;
585 /* FIXME: we currently can pass only SSA function parameters to the split
586 arguments. Once parm_adjustment infrastructure is supported by cloning,
587 we can pass more than that. */
588 if (num_args != bitmap_count_bits (current->ssa_names_to_pass))
591 if (dump_file && (dump_flags & TDF_DETAILS))
592 fprintf (dump_file,
593 " Refused: need to pass non-param values\n");
594 return;
597 /* When there are non-ssa vars used in the split region, see if they
598 are used in the header region. If so, reject the split.
599 FIXME: we can use nested function support to access both. */
600 if (!bitmap_empty_p (non_ssa_vars)
601 && !verify_non_ssa_vars (current, non_ssa_vars, return_bb))
603 if (dump_file && (dump_flags & TDF_DETAILS))
604 fprintf (dump_file,
605 " Refused: split part has non-ssa uses\n");
606 return;
609 /* If the split point is dominated by a forbidden block, reject
610 the split. */
611 if (!bitmap_empty_p (forbidden_dominators)
612 && dominated_by_forbidden (current->entry_bb))
614 if (dump_file && (dump_flags & TDF_DETAILS))
615 fprintf (dump_file,
616 " Refused: split point dominated by forbidden block\n");
617 return;
620 /* See if retval used by return bb is computed by header or split part.
621 When it is computed by split part, we need to produce return statement
622 in the split part and add code to header to pass it around.
624 This is a bit tricky to test:
625 1) When there is no return_bb or no return value, we always pass
626 value around.
627 2) Invariants are always computed by caller.
628 3) For SSA we need to look if defining statement is in header or split part
629 4) For non-SSA we need to look where the var is computed. */
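/* For example (GIMPLE-like sketch, SSA names invented), given

     <header>   if (cheap_test) goto A; else goto B;
     <A>        ret_1 = 0;
     <B>        ret_2 = expensive ();   <- split part
     <return>   # ret_3 = PHI <ret_1, ret_2>
		return ret_3;

   ret_3 is defined in return_bb itself, so per case 3 (and
   split_part_set_ssa_name_p) the split part is treated as setting
   the return value and must pass it back to the header. */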
630 retval = find_retval (return_bb);
631 if (!retval)
632 current->split_part_set_retval = true;
633 else if (is_gimple_min_invariant (retval))
634 current->split_part_set_retval = false;
635 /* A special case is a value returned by reference, which we record as if
636 it were a non-ssa set to result_decl. */
637 else if (TREE_CODE (retval) == SSA_NAME
638 && SSA_NAME_VAR (retval)
639 && TREE_CODE (SSA_NAME_VAR (retval)) == RESULT_DECL
640 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
641 current->split_part_set_retval
642 = bitmap_bit_p (non_ssa_vars, DECL_UID (SSA_NAME_VAR (retval)));
643 else if (TREE_CODE (retval) == SSA_NAME)
644 current->split_part_set_retval
645 = split_part_set_ssa_name_p (retval, current, return_bb);
646 else if (TREE_CODE (retval) == PARM_DECL)
647 current->split_part_set_retval = false;
648 else if (TREE_CODE (retval) == VAR_DECL
649 || TREE_CODE (retval) == RESULT_DECL)
650 current->split_part_set_retval
651 = bitmap_bit_p (non_ssa_vars, DECL_UID (retval));
652 else
653 current->split_part_set_retval = true;
655 /* See if retbnd used by return bb is computed by header or split part. */
656 retbnd = find_retbnd (return_bb);
657 if (retbnd)
659 bool split_part_set_retbnd
660 = split_part_set_ssa_name_p (retbnd, current, return_bb);
662 /* If we have both return value and bounds then keep their definitions
663 in a single function. We use SSA names to link returned bounds and
664 value and therefore do not handle cases when result is passed by
665 reference (which should not be our case anyway since bounds are
666 returned for pointers only). */
667 if ((DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))
668 && current->split_part_set_retval)
669 || split_part_set_retbnd != current->split_part_set_retval)
671 if (dump_file && (dump_flags & TDF_DETAILS))
672 fprintf (dump_file,
673 " Refused: split point splits return value and bounds\n");
674 return;
678 /* split_function fixes up at most one non-virtual PHI node in return_bb,
679 for the return value. If there are other PHIs, give up. */
680 if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
682 gphi_iterator psi;
684 for (psi = gsi_start_phis (return_bb); !gsi_end_p (psi); gsi_next (&psi))
685 if (!virtual_operand_p (gimple_phi_result (psi.phi ()))
686 && !(retval
687 && current->split_part_set_retval
688 && TREE_CODE (retval) == SSA_NAME
689 && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))
690 && SSA_NAME_DEF_STMT (retval) == psi.phi ()))
692 if (dump_file && (dump_flags & TDF_DETAILS))
693 fprintf (dump_file,
694 " Refused: return bb has extra PHIs\n");
695 return;
699 if (dump_file && (dump_flags & TDF_DETAILS))
700 fprintf (dump_file, " Accepted!\n");
702 /* At the moment choose the split point with the lowest frequency and,
703 among those, the one leaving the smallest header.
704 In future we might reconsider this heuristic. */
705 if (!best_split_point.split_bbs
706 || best_split_point.entry_bb->frequency > current->entry_bb->frequency
707 || (best_split_point.entry_bb->frequency == current->entry_bb->frequency
708 && best_split_point.split_size < current->split_size))
711 if (dump_file && (dump_flags & TDF_DETAILS))
712 fprintf (dump_file, " New best split point!\n");
713 if (best_split_point.ssa_names_to_pass)
715 BITMAP_FREE (best_split_point.ssa_names_to_pass);
716 BITMAP_FREE (best_split_point.split_bbs);
718 best_split_point = *current;
719 best_split_point.ssa_names_to_pass = BITMAP_ALLOC (NULL);
720 bitmap_copy (best_split_point.ssa_names_to_pass,
721 current->ssa_names_to_pass);
722 best_split_point.split_bbs = BITMAP_ALLOC (NULL);
723 bitmap_copy (best_split_point.split_bbs, current->split_bbs);
727 /* Return basic block containing RETURN statement. We allow basic blocks
728 of the form:
729 <retval> = tmp_var;
730 return <retval>
731 but return_bb can not be more complex than this (except that for
732 -fsanitize=thread we allow a TSAN_FUNC_EXIT () internal call in there).
733 If nothing is found, return the exit block.
735 When there are multiple RETURN statements, choose the one with a return
736 value, since that one is more likely shared by multiple code paths.
738 Return BB is special, because for function splitting it is the only
739 basic block that is duplicated in between header and split part of the
740 function.
742 TODO: We might support multiple return blocks. */
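/* Sketch of an accepted return_bb (GIMPLE-like, names invented):

     <bb 7>:
     <retval> = tmp_3;
     return <retval>;

   Labels, debug statements and clobbers are skipped while scanning
   backwards; any other statement makes the exit predecessor
   unsuitable, and the exit block itself is returned instead. */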
744 static basic_block
745 find_return_bb (void)
747 edge e;
748 basic_block return_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
749 gimple_stmt_iterator bsi;
750 bool found_return = false;
751 tree retval = NULL_TREE;
753 if (!single_pred_p (EXIT_BLOCK_PTR_FOR_FN (cfun)))
754 return return_bb;
756 e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun));
757 for (bsi = gsi_last_bb (e->src); !gsi_end_p (bsi); gsi_prev (&bsi))
759 gimple *stmt = gsi_stmt (bsi);
760 if (gimple_code (stmt) == GIMPLE_LABEL
761 || is_gimple_debug (stmt)
762 || gimple_clobber_p (stmt))
764 else if (gimple_code (stmt) == GIMPLE_ASSIGN
765 && found_return
766 && gimple_assign_single_p (stmt)
767 && (auto_var_in_fn_p (gimple_assign_rhs1 (stmt),
768 current_function_decl)
769 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
770 && retval == gimple_assign_lhs (stmt))
772 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
774 found_return = true;
775 retval = gimple_return_retval (return_stmt);
777 /* For -fsanitize=thread, allow also TSAN_FUNC_EXIT () in the return
778 bb. */
779 else if ((flag_sanitize & SANITIZE_THREAD)
780 && is_gimple_call (stmt)
781 && gimple_call_internal_p (stmt)
782 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
784 else
785 break;
787 if (gsi_end_p (bsi) && found_return)
788 return_bb = e->src;
790 return return_bb;
793 /* Given return basic block RETURN_BB, see where return value is really
794 stored. */
795 static tree
796 find_retval (basic_block return_bb)
798 gimple_stmt_iterator bsi;
799 for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
800 if (greturn *return_stmt = dyn_cast <greturn *> (gsi_stmt (bsi)))
801 return gimple_return_retval (return_stmt);
802 else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
803 && !gimple_clobber_p (gsi_stmt (bsi)))
804 return gimple_assign_rhs1 (gsi_stmt (bsi));
805 return NULL;
808 /* Given return basic block RETURN_BB, see where return bounds are really
809 stored. */
810 static tree
811 find_retbnd (basic_block return_bb)
813 gimple_stmt_iterator bsi;
814 for (bsi = gsi_last_bb (return_bb); !gsi_end_p (bsi); gsi_prev (&bsi))
815 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
816 return gimple_return_retbnd (gsi_stmt (bsi));
817 return NULL;
820 /* Callback for walk_stmt_load_store_addr_ops. If T is a non-SSA automatic
821 variable, mark it as used in the bitmap passed via DATA.
822 Return true when access to T prevents splitting the function. */
824 static bool
825 mark_nonssa_use (gimple *, tree t, tree, void *data)
827 t = get_base_address (t);
829 if (!t || is_gimple_reg (t))
830 return false;
832 /* At present we can't pass non-SSA arguments to split function.
833 FIXME: this can be relaxed by passing references to arguments. */
834 if (TREE_CODE (t) == PARM_DECL)
836 if (dump_file && (dump_flags & TDF_DETAILS))
837 fprintf (dump_file,
838 "Cannot split: use of non-ssa function parameter.\n");
839 return true;
842 if ((TREE_CODE (t) == VAR_DECL
843 && auto_var_in_fn_p (t, current_function_decl))
844 || TREE_CODE (t) == RESULT_DECL
845 || (TREE_CODE (t) == LABEL_DECL
846 && FORCED_LABEL (t)))
847 bitmap_set_bit ((bitmap)data, DECL_UID (t));
849 /* For DECL_BY_REFERENCE, the return value is actually a pointer. We want
850 to pretend that the value pointed to is actual result decl. */
851 if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
852 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
853 && SSA_NAME_VAR (TREE_OPERAND (t, 0))
854 && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
855 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
856 return
857 bitmap_bit_p ((bitmap)data,
858 DECL_UID (DECL_RESULT (current_function_decl)));
860 return false;
863 /* Compute local properties of basic block BB we collect when looking for
864 split points. We look for ssa defs and store them in SET_SSA_NAMES,
865 for ssa uses and store them in USED_SSA_NAMES and for any non-SSA automatic
866 vars stored in NON_SSA_VARS.
868 When BB has edge to RETURN_BB, collect uses in RETURN_BB too.
870 Return false when BB contains something that prevents it from being put into
871 split function. */
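/* Small worked example (hypothetical GIMPLE, invented SSA names):
   for a block containing

     a_2 = b_1 + 1;

   a_2 lands in SET_SSA_NAMES and b_1 in USED_SSA_NAMES, while an
   access to a non-SSA automatic variable buf would instead set the
   bit for DECL_UID (buf) in NON_SSA_VARS via mark_nonssa_use above. */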
873 static bool
874 visit_bb (basic_block bb, basic_block return_bb,
875 bitmap set_ssa_names, bitmap used_ssa_names,
876 bitmap non_ssa_vars)
878 edge e;
879 edge_iterator ei;
880 bool can_split = true;
882 for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
883 gsi_next (&bsi))
885 gimple *stmt = gsi_stmt (bsi);
886 tree op;
887 ssa_op_iter iter;
888 tree decl;
890 if (is_gimple_debug (stmt))
891 continue;
893 if (gimple_clobber_p (stmt))
894 continue;
896 /* FIXME: We can split regions containing EH. We can not however
897 split RESX, EH_DISPATCH and EH_POINTER referring to same region
898 into different partitions. This would require tracking of
899 EH regions and checking in consider_split_point if they
900 are not used elsewhere. */
901 if (gimple_code (stmt) == GIMPLE_RESX)
903 if (dump_file && (dump_flags & TDF_DETAILS))
904 fprintf (dump_file, "Cannot split: resx.\n");
905 can_split = false;
907 if (gimple_code (stmt) == GIMPLE_EH_DISPATCH)
909 if (dump_file && (dump_flags & TDF_DETAILS))
910 fprintf (dump_file, "Cannot split: eh dispatch.\n");
911 can_split = false;
914 /* Check builtins that prevent splitting. */
915 if (gimple_code (stmt) == GIMPLE_CALL
916 && (decl = gimple_call_fndecl (stmt)) != NULL_TREE
917 && DECL_BUILT_IN (decl)
918 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
919 switch (DECL_FUNCTION_CODE (decl))
921 /* FIXME: once we allow passing non-parm values to the split part,
922 we need to be sure to handle correct builtin_stack_save and
923 builtin_stack_restore. At the moment we are safe; there is no
924 way to store builtin_stack_save result in non-SSA variable
925 since all calls to those are compiler generated. */
926 case BUILT_IN_APPLY:
927 case BUILT_IN_APPLY_ARGS:
928 case BUILT_IN_VA_START:
929 if (dump_file && (dump_flags & TDF_DETAILS))
930 fprintf (dump_file,
931 "Cannot split: builtin_apply and va_start.\n");
932 can_split = false;
933 break;
934 case BUILT_IN_EH_POINTER:
935 if (dump_file && (dump_flags & TDF_DETAILS))
936 fprintf (dump_file, "Cannot split: builtin_eh_pointer.\n");
937 can_split = false;
938 break;
939 default:
940 break;
943 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
944 bitmap_set_bit (set_ssa_names, SSA_NAME_VERSION (op));
945 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
946 bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
947 can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
948 mark_nonssa_use,
949 mark_nonssa_use,
950 mark_nonssa_use);
952 for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
953 gsi_next (&bsi))
955 gphi *stmt = bsi.phi ();
956 unsigned int i;
958 if (virtual_operand_p (gimple_phi_result (stmt)))
959 continue;
960 bitmap_set_bit (set_ssa_names,
961 SSA_NAME_VERSION (gimple_phi_result (stmt)));
962 for (i = 0; i < gimple_phi_num_args (stmt); i++)
964 tree op = gimple_phi_arg_def (stmt, i);
965 if (TREE_CODE (op) == SSA_NAME)
966 bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
968 can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
969 mark_nonssa_use,
970 mark_nonssa_use,
971 mark_nonssa_use);
973 /* Record also uses coming from PHI operand in return BB. */
974 FOR_EACH_EDGE (e, ei, bb->succs)
975 if (e->dest == return_bb)
977 for (gphi_iterator bsi = gsi_start_phis (return_bb);
978 !gsi_end_p (bsi);
979 gsi_next (&bsi))
981 gphi *stmt = bsi.phi ();
982 tree op = gimple_phi_arg_def (stmt, e->dest_idx);
984 if (virtual_operand_p (gimple_phi_result (stmt)))
985 continue;
986 if (TREE_CODE (op) == SSA_NAME)
987 bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
988 else
989 can_split &= !mark_nonssa_use (stmt, op, op, non_ssa_vars);
992 return can_split;
995 /* Stack entry for recursive DFS walk in find_split_point. */
997 struct stack_entry
999 /* Basic block we are examining. */
1000 basic_block bb;
1002 /* SSA names set and used by the BB and all BBs reachable
1003 from it via DFS walk. */
1004 bitmap set_ssa_names, used_ssa_names;
1005 bitmap non_ssa_vars;
1008 /* All BBs visited from this BB via DFS walk. */
1008 bitmap bbs_visited;
1010 /* Last examined edge in DFS walk. Since we walk unoriented graph,
1011 the value is up to sum of incoming and outgoing edges of BB. */
1012 unsigned int edge_num;
1014 /* Stack entry index of earliest BB reachable from current BB
1015 or any BB visited later in DFS walk. */
1016 int earliest;
1018 /* Overall time and size of all BBs reached from this BB in DFS walk. */
1019 int overall_time, overall_size;
1021 /* When false we can not split on this BB. */
1022 bool can_split;
1026 /* Find all articulations and call consider_split on them.
1027 OVERALL_TIME and OVERALL_SIZE are the time and size of the function.
1029 We perform basic algorithm for finding an articulation in a graph
1030 created from CFG by considering it to be an unoriented graph.
1032 The articulation is discovered via DFS walk. We collect earliest
1033 basic block on stack that is reachable via backward edge. Articulation
1034 is any basic block such that there is no backward edge bypassing it.
1035 To reduce stack usage we maintain heap allocated stack in STACK vector.
1036 AUX pointer of BB is set to index it appears in the stack or -1 once
1037 it is visited and popped off the stack.
1039 The algorithm finds articulation after visiting the whole component
1040 reachable by it. This makes it convenient to collect information about
1041 the component used by consider_split. */
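/* Illustrative CFG (hypothetical; edges point downward):

	  A
	 / \
	B   C
	 \ /
	  D
	  |
	  E

   Viewed as an unoriented graph, no edge bypasses D, so D is an
   articulation and a candidate split point: D and E would form the
   split part with A, B and C as the header. B is not an
   articulation, since the path A-C-D bypasses it. */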
1043 static void
1044 find_split_points (basic_block return_bb, int overall_time, int overall_size)
1046 stack_entry first;
1047 vec<stack_entry> stack = vNULL;
1048 basic_block bb;
1049 struct split_point current;
1051 current.header_time = overall_time;
1052 current.header_size = overall_size;
1053 current.split_time = 0;
1054 current.split_size = 0;
1055 current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
1057 first.bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1058 first.edge_num = 0;
1059 first.overall_time = 0;
1060 first.overall_size = 0;
1061 first.earliest = INT_MAX;
1062 first.set_ssa_names = 0;
1063 first.used_ssa_names = 0;
1064 first.non_ssa_vars = 0;
1065 first.bbs_visited = 0;
1066 first.can_split = false;
1067 stack.safe_push (first);
1068 ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = (void *)(intptr_t)-1;
1070 while (!stack.is_empty ())
1072 stack_entry *entry = &stack.last ();
1074 /* We are walking an acyclic graph, so edge_num counts
1075 succ and pred edges together. However when considering
1076 articulation, we want to have processed everything reachable
1077 from articulation but nothing that reaches into it. */
1078 if (entry->edge_num == EDGE_COUNT (entry->bb->succs)
1079 && entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1081 int pos = stack.length ();
1082 entry->can_split &= visit_bb (entry->bb, return_bb,
1083 entry->set_ssa_names,
1084 entry->used_ssa_names,
1085 entry->non_ssa_vars);
1086 if (pos <= entry->earliest && !entry->can_split
1087 && dump_file && (dump_flags & TDF_DETAILS))
1088 fprintf (dump_file,
1089 "found articulation at bb %i but can not split\n",
1090 entry->bb->index);
1091 if (pos <= entry->earliest && entry->can_split)
1093 if (dump_file && (dump_flags & TDF_DETAILS))
1094 fprintf (dump_file, "found articulation at bb %i\n",
1095 entry->bb->index);
1096 current.entry_bb = entry->bb;
1097 current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
1098 bitmap_and_compl (current.ssa_names_to_pass,
1099 entry->used_ssa_names, entry->set_ssa_names);
1100 current.header_time = overall_time - entry->overall_time;
1101 current.header_size = overall_size - entry->overall_size;
1102 current.split_time = entry->overall_time;
1103 current.split_size = entry->overall_size;
1104 current.split_bbs = entry->bbs_visited;
1105 consider_split (&current, entry->non_ssa_vars, return_bb);
1106 BITMAP_FREE (current.ssa_names_to_pass);
1109 /* Do actual DFS walk. */
1110 if (entry->edge_num
1111 < (EDGE_COUNT (entry->bb->succs)
1112 + EDGE_COUNT (entry->bb->preds)))
1114 edge e;
1115 basic_block dest;
1116 if (entry->edge_num < EDGE_COUNT (entry->bb->succs))
1118 e = EDGE_SUCC (entry->bb, entry->edge_num);
1119 dest = e->dest;
1121 else
1123 e = EDGE_PRED (entry->bb, entry->edge_num
1124 - EDGE_COUNT (entry->bb->succs));
1125 dest = e->src;
1128 entry->edge_num++;
1130 /* New BB to visit, push it to the stack. */
1131 if (dest != return_bb && dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1132 && !dest->aux)
1134 stack_entry new_entry;
1136 new_entry.bb = dest;
1137 new_entry.edge_num = 0;
1138 new_entry.overall_time
1139 = bb_info_vec[dest->index].time;
1140 new_entry.overall_size
1141 = bb_info_vec[dest->index].size;
1142 new_entry.earliest = INT_MAX;
1143 new_entry.set_ssa_names = BITMAP_ALLOC (NULL);
1144 new_entry.used_ssa_names = BITMAP_ALLOC (NULL);
1145 new_entry.bbs_visited = BITMAP_ALLOC (NULL);
1146 new_entry.non_ssa_vars = BITMAP_ALLOC (NULL);
1147 new_entry.can_split = true;
1148 bitmap_set_bit (new_entry.bbs_visited, dest->index);
1149 stack.safe_push (new_entry);
1150 dest->aux = (void *)(intptr_t)stack.length ();
1152 /* Back edge found, record the earliest point. */
1153 else if ((intptr_t)dest->aux > 0
1154 && (intptr_t)dest->aux < entry->earliest)
1155 entry->earliest = (intptr_t)dest->aux;
1157 /* We are done with examining the edges. Pop off the value from stack
1158 and merge stuff we accumulate during the walk. */
1159 else if (entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1161 stack_entry *prev = &stack[stack.length () - 2];
1163 entry->bb->aux = (void *)(intptr_t)-1;
1164 prev->can_split &= entry->can_split;
1165 if (prev->set_ssa_names)
1167 bitmap_ior_into (prev->set_ssa_names, entry->set_ssa_names);
1168 bitmap_ior_into (prev->used_ssa_names, entry->used_ssa_names);
1169 bitmap_ior_into (prev->bbs_visited, entry->bbs_visited);
1170 bitmap_ior_into (prev->non_ssa_vars, entry->non_ssa_vars);
1172 if (prev->earliest > entry->earliest)
1173 prev->earliest = entry->earliest;
1174 prev->overall_time += entry->overall_time;
1175 prev->overall_size += entry->overall_size;
1176 BITMAP_FREE (entry->set_ssa_names);
1177 BITMAP_FREE (entry->used_ssa_names);
1178 BITMAP_FREE (entry->bbs_visited);
1179 BITMAP_FREE (entry->non_ssa_vars);
1180 stack.pop ();
1182 else
1183 stack.pop ();
1185 ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = NULL;
1186 FOR_EACH_BB_FN (bb, cfun)
1187 bb->aux = NULL;
1188 stack.release ();
1189 BITMAP_FREE (current.ssa_names_to_pass);
1192 /* Split function at SPLIT_POINT. */
1194 static void
1195 split_function (basic_block return_bb, struct split_point *split_point,
1196 bool add_tsan_func_exit)
1198 vec<tree> args_to_pass = vNULL;
1199 bitmap args_to_skip;
1200 tree parm;
1201 int num = 0;
1202 cgraph_node *node, *cur_node = cgraph_node::get (current_function_decl);
1203 basic_block call_bb;
1204 gcall *call, *tsan_func_exit_call = NULL;
1205 edge e;
1206 edge_iterator ei;
1207 tree retval = NULL, real_retval = NULL, retbnd = NULL;
1208 bool split_part_return_p = false;
1209 bool with_bounds = chkp_function_instrumented_p (current_function_decl);
1210 gimple *last_stmt = NULL;
1211 unsigned int i;
1212 tree arg, ddef;
1213 vec<tree, va_gc> **debug_args = NULL;
1215 if (dump_file)
1217 fprintf (dump_file, "\n\nSplitting function at:\n");
1218 dump_split_point (dump_file, split_point);
1221 if (cur_node->local.can_change_signature)
1222 args_to_skip = BITMAP_ALLOC (NULL);
1223 else
1224 args_to_skip = NULL;
1226 /* Collect the parameters of new function and args_to_skip bitmap. */
1227 for (parm = DECL_ARGUMENTS (current_function_decl);
1228 parm; parm = DECL_CHAIN (parm), num++)
1229 if (args_to_skip
1230 && (!is_gimple_reg (parm)
1231 || (ddef = ssa_default_def (cfun, parm)) == NULL_TREE
1232 || !bitmap_bit_p (split_point->ssa_names_to_pass,
1233 SSA_NAME_VERSION (ddef))))
1234 bitmap_set_bit (args_to_skip, num);
1235 else
1237 /* This parm might not have been used up to now, but is going to be
1238 used, hence register it. */
1239 if (is_gimple_reg (parm))
1240 arg = get_or_create_ssa_default_def (cfun, parm);
1241 else
1242 arg = parm;
1244 if (!useless_type_conversion_p (DECL_ARG_TYPE (parm), TREE_TYPE (arg)))
1245 arg = fold_convert (DECL_ARG_TYPE (parm), arg);
1246 args_to_pass.safe_push (arg);
1249 /* See if the split function will return. */
1250 FOR_EACH_EDGE (e, ei, return_bb->preds)
1251 if (bitmap_bit_p (split_point->split_bbs, e->src->index))
1252 break;
1253 if (e)
1254 split_part_return_p = true;
1256 /* Add return block to what will become the split function.
1257 We do not return; no return block is needed. */
1258 if (!split_part_return_p)
1260 /* We have no return block, so nothing is needed. */
1261 else if (return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1263 /* When we do not want to return value, we need to construct
1264 new return block with empty return statement.
1265 FIXME: Once we are able to change return type, we should change function
1266 to return void instead of just outputting function with undefined return
1267 value. For structures this affects quality of codegen. */
1268 else if (!split_point->split_part_set_retval
1269 && find_retval (return_bb))
1271 bool redirected = true;
1272 basic_block new_return_bb = create_basic_block (NULL, 0, return_bb);
1273 gimple_stmt_iterator gsi = gsi_start_bb (new_return_bb);
1274 gsi_insert_after (&gsi, gimple_build_return (NULL), GSI_NEW_STMT);
1275 while (redirected)
1277 redirected = false;
1278 FOR_EACH_EDGE (e, ei, return_bb->preds)
1279 if (bitmap_bit_p (split_point->split_bbs, e->src->index))
1281 new_return_bb->count += e->count;
1282 new_return_bb->frequency += EDGE_FREQUENCY (e);
1283 redirect_edge_and_branch (e, new_return_bb);
1284 redirected = true;
1285 break;
1288 e = make_edge (new_return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1289 e->probability = REG_BR_PROB_BASE;
1290 e->count = new_return_bb->count;
1291 add_bb_to_loop (new_return_bb, current_loops->tree_root);
1292 bitmap_set_bit (split_point->split_bbs, new_return_bb->index);
1294 /* When we pass around the value, use existing return block. */
1295 else
1296 bitmap_set_bit (split_point->split_bbs, return_bb->index);
1298 /* If RETURN_BB has virtual operand PHIs, they must be removed and the
1299 virtual operand marked for renaming as we change the CFG in a way that
1300 tree-inline is not able to compensate for.
1302 Note this can happen whether or not we have a return value. If we have
1303 a return value, then RETURN_BB may have PHIs for real operands too. */
1304 if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
1306 bool phi_p = false;
1307 for (gphi_iterator gsi = gsi_start_phis (return_bb);
1308 !gsi_end_p (gsi);)
1310 gphi *stmt = gsi.phi ();
1311 if (!virtual_operand_p (gimple_phi_result (stmt)))
1313 gsi_next (&gsi);
1314 continue;
1316 mark_virtual_phi_result_for_renaming (stmt);
1317 remove_phi_node (&gsi, true);
1318 phi_p = true;
1320 /* In reality we have to rename the reaching definition of the
1321 virtual operand at return_bb as we will eventually release it
1322 when we remove the code region we outlined.
1323 So we have to rename all immediate virtual uses of that region
1324 if we didn't see a PHI definition yet. */
1325 /* ??? In real reality we want to set the reaching vdef of the
1326 entry of the SESE region as the vuse of the call and the reaching
1327 vdef of the exit of the SESE region as the vdef of the call. */
1328 if (!phi_p)
1329 for (gimple_stmt_iterator gsi = gsi_start_bb (return_bb);
1330 !gsi_end_p (gsi);
1331 gsi_next (&gsi))
1333 gimple *stmt = gsi_stmt (gsi);
1334 if (gimple_vuse (stmt))
1336 gimple_set_vuse (stmt, NULL_TREE);
1337 update_stmt (stmt);
1339 if (gimple_vdef (stmt))
1340 break;
1344 /* Now create the actual clone. */
1345 cgraph_edge::rebuild_edges ();
1346 node = cur_node->create_version_clone_with_body
1347 (vNULL, NULL, args_to_skip, !split_part_return_p, split_point->split_bbs,
1348 split_point->entry_bb, "part");
1350 node->split_part = true;
1352 /* Let's take a time profile for the split function. */
1353 node->tp_first_run = cur_node->tp_first_run + 1;
1355 /* For usual cloning it is enough to clear builtin only when signature
1356 changes. For partial inlining, however, we can not expect the part
1357 of a builtin's implementation to have the same semantics as the whole. */
1358 if (DECL_BUILT_IN (node->decl))
1360 DECL_BUILT_IN_CLASS (node->decl) = NOT_BUILT_IN;
1361 DECL_FUNCTION_CODE (node->decl) = (enum built_in_function) 0;
1364 /* If the original function is instrumented then its
1365 part is also instrumented. */
1366 if (with_bounds)
1367 chkp_function_mark_instrumented (node->decl);
1369 /* If the original function is declared inline, there is no point in issuing
1370 a warning for the non-inlinable part. */
1371 DECL_NO_INLINE_WARNING_P (node->decl) = 1;
1372 cur_node->remove_callees ();
1373 cur_node->remove_all_references ();
1374 if (!split_part_return_p)
1375 TREE_THIS_VOLATILE (node->decl) = 1;
1376 if (dump_file)
1377 dump_function_to_file (node->decl, dump_file, dump_flags);
1379 /* Create the basic block we place call into. It is the entry basic block
1380 split after last label. */
1381 call_bb = split_point->entry_bb;
1382 for (gimple_stmt_iterator gsi = gsi_start_bb (call_bb); !gsi_end_p (gsi);)
1383 if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
1385 last_stmt = gsi_stmt (gsi);
1386 gsi_next (&gsi);
1388 else
1389 break;
1390 e = split_block (split_point->entry_bb, last_stmt);
1391 remove_edge (e);
1393 /* Produce the call statement. */
1394 gimple_stmt_iterator gsi = gsi_last_bb (call_bb);
1395 FOR_EACH_VEC_ELT (args_to_pass, i, arg)
1396 if (!is_gimple_val (arg))
1398 arg = force_gimple_operand_gsi (&gsi, arg, true, NULL_TREE,
1399 false, GSI_CONTINUE_LINKING);
1400 args_to_pass[i] = arg;
1402 call = gimple_build_call_vec (node->decl, args_to_pass);
1403 gimple_call_set_with_bounds (call, with_bounds);
1404 gimple_set_block (call, DECL_INITIAL (current_function_decl));
1405 args_to_pass.release ();
1407 /* For optimized away parameters, add on the caller side
1408 before the call
1409 DEBUG D#X => parm_Y(D)
1410 stmts and associate D#X with parm in decl_debug_args_lookup
1411 vector to say for debug info that if parameter parm had been passed,
1412 it would have value parm_Y(D). */
1413 if (args_to_skip)
1414 for (parm = DECL_ARGUMENTS (current_function_decl), num = 0;
1415 parm; parm = DECL_CHAIN (parm), num++)
1416 if (bitmap_bit_p (args_to_skip, num)
1417 && is_gimple_reg (parm))
1419 tree ddecl;
1420 gimple *def_temp;
1422 /* This needs to be done even without MAY_HAVE_DEBUG_STMTS,
1423 otherwise if it didn't exist before, we'd end up with
1424 different SSA_NAME_VERSIONs between -g and -g0. */
1425 arg = get_or_create_ssa_default_def (cfun, parm);
1426 if (!MAY_HAVE_DEBUG_STMTS)
1427 continue;
1429 if (debug_args == NULL)
1430 debug_args = decl_debug_args_insert (node->decl);
1431 ddecl = make_node (DEBUG_EXPR_DECL);
1432 DECL_ARTIFICIAL (ddecl) = 1;
1433 TREE_TYPE (ddecl) = TREE_TYPE (parm);
1434 DECL_MODE (ddecl) = DECL_MODE (parm);
1435 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
1436 vec_safe_push (*debug_args, ddecl);
1437 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
1438 call);
1439 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
1441 /* And on the callee side, add
1442 DEBUG D#Y s=> parm
1443 DEBUG var => D#Y
1444 stmts to the first bb where var is a VAR_DECL created for the
1445 optimized away parameter in DECL_INITIAL block. This hints
1446 in the debug info that var (whose DECL_ORIGIN is the parm PARM_DECL)
1447 is optimized away, but could be looked up at the call site
1448 as value of D#X there. */
1449 if (debug_args != NULL)
1451 unsigned int i;
1452 tree var, vexpr;
1453 gimple_stmt_iterator cgsi;
1454 gimple *def_temp;
1456 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1457 var = BLOCK_VARS (DECL_INITIAL (node->decl));
1458 i = vec_safe_length (*debug_args);
1459 cgsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1462 i -= 2;
1463 while (var != NULL_TREE
1464 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
1465 var = TREE_CHAIN (var);
1466 if (var == NULL_TREE)
1467 break;
1468 vexpr = make_node (DEBUG_EXPR_DECL);
1469 parm = (**debug_args)[i];
1470 DECL_ARTIFICIAL (vexpr) = 1;
1471 TREE_TYPE (vexpr) = TREE_TYPE (parm);
1472 DECL_MODE (vexpr) = DECL_MODE (parm);
1473 def_temp = gimple_build_debug_source_bind (vexpr, parm,
1474 NULL);
1475 gsi_insert_before (&cgsi, def_temp, GSI_SAME_STMT);
1476 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
1477 gsi_insert_before (&cgsi, def_temp, GSI_SAME_STMT);
1479 while (i);
1480 pop_cfun ();
1483 /* We avoid address being taken on any variable used by split part,
1484 so return slot optimization is always possible. Moreover this is
1485 required to make DECL_BY_REFERENCE work. */
1486 if (aggregate_value_p (DECL_RESULT (current_function_decl),
1487 TREE_TYPE (current_function_decl))
1488 && (!is_gimple_reg_type (TREE_TYPE (DECL_RESULT (current_function_decl)))
1489 || DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))))
1490 gimple_call_set_return_slot_opt (call, true);
1492 if (add_tsan_func_exit)
1493 tsan_func_exit_call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
1495 /* Update return value. This is a bit tricky. When we do not return,
1496 do nothing. When we return we might need to update return_bb
1497 or produce a new return statement. */
1498 if (!split_part_return_p)
1500 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1501 if (tsan_func_exit_call)
1502 gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
1504 else
1506 e = make_edge (call_bb, return_bb,
1507 return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
1508 ? 0 : EDGE_FALLTHRU);
1509 e->count = call_bb->count;
1510 e->probability = REG_BR_PROB_BASE;
1512 /* If there is return basic block, see what value we need to store
1513 return value into and put call just before it. */
1514 if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
1516 real_retval = retval = find_retval (return_bb);
1517 retbnd = find_retbnd (return_bb);
1519 if (real_retval && split_point->split_part_set_retval)
1521 gphi_iterator psi;
1523 /* See if we need new SSA_NAME for the result.
1524 When DECL_BY_REFERENCE is true, retval is actually pointer to
1525 return value and it is constant in whole function. */
1526 if (TREE_CODE (retval) == SSA_NAME
1527 && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
1529 retval = copy_ssa_name (retval, call);
1531 /* See if there is PHI defining return value. */
1532 for (psi = gsi_start_phis (return_bb);
1533 !gsi_end_p (psi); gsi_next (&psi))
1534 if (!virtual_operand_p (gimple_phi_result (psi.phi ())))
1535 break;
1537 /* When there is PHI, just update its value. */
1538 if (TREE_CODE (retval) == SSA_NAME
1539 && !gsi_end_p (psi))
1540 add_phi_arg (psi.phi (), retval, e, UNKNOWN_LOCATION);
1541 /* Otherwise update the return BB itself.
1542 find_return_bb allows at most one assignment to return value,
1543 so update first statement. */
1544 else
1546 gimple_stmt_iterator bsi;
1547 for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
1548 gsi_next (&bsi))
1549 if (greturn *return_stmt
1550 = dyn_cast <greturn *> (gsi_stmt (bsi)))
1552 gimple_return_set_retval (return_stmt, retval);
1553 break;
1555 else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
1556 && !gimple_clobber_p (gsi_stmt (bsi)))
1558 gimple_assign_set_rhs1 (gsi_stmt (bsi), retval);
1559 break;
1561 update_stmt (gsi_stmt (bsi));
1564 /* Replace retbnd with new one. */
1565 if (retbnd)
1567 gimple_stmt_iterator bsi;
1568 for (bsi = gsi_last_bb (return_bb); !gsi_end_p (bsi);
1569 gsi_prev (&bsi))
1570 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
1572 retbnd = copy_ssa_name (retbnd, call);
1573 gimple_return_set_retbnd (gsi_stmt (bsi), retbnd);
1574 update_stmt (gsi_stmt (bsi));
1575 break;
1579 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
1581 gimple_call_set_lhs (call, build_simple_mem_ref (retval));
1582 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1584 else
1586 tree restype;
1587 restype = TREE_TYPE (DECL_RESULT (current_function_decl));
1588 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1589 if (!useless_type_conversion_p (TREE_TYPE (retval), restype))
1591 gimple *cpy;
1592 tree tem = create_tmp_reg (restype);
1593 tem = make_ssa_name (tem, call);
1594 cpy = gimple_build_assign (retval, NOP_EXPR, tem);
1595 gsi_insert_after (&gsi, cpy, GSI_NEW_STMT);
1596 retval = tem;
1598 /* Build bndret call to obtain returned bounds. */
1599 if (retbnd)
1600 chkp_insert_retbnd_call (retbnd, retval, &gsi);
1601 gimple_call_set_lhs (call, retval);
1602 update_stmt (call);
1605 else
1606 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1607 if (tsan_func_exit_call)
1608 gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
1610 /* We don't use return block (there is either no return in function or
1611 multiple of them). So create new basic block with return statement. */
1613 else
1615 greturn *ret;
1616 if (split_point->split_part_set_retval
1617 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1619 retval = DECL_RESULT (current_function_decl);
1621 if (chkp_function_instrumented_p (current_function_decl)
1622 && BOUNDED_P (retval))
1623 retbnd = create_tmp_reg (pointer_bounds_type_node);
1625 /* We use temporary register to hold value when aggregate_value_p
1626 is false. Similarly for DECL_BY_REFERENCE we must avoid extra
1627 copy. */
1628 if (!aggregate_value_p (retval, TREE_TYPE (current_function_decl))
1629 && !DECL_BY_REFERENCE (retval))
1630 retval = create_tmp_reg (TREE_TYPE (retval));
1631 if (is_gimple_reg (retval))
1633 /* When returning by reference, there is only one SSA name
1634 assigned to RESULT_DECL (that is pointer to return value).
1635 Look it up or create new one if it is missing. */
1636 if (DECL_BY_REFERENCE (retval))
1637 retval = get_or_create_ssa_default_def (cfun, retval);
1638 /* Otherwise produce new SSA name for return value. */
1639 else
1640 retval = make_ssa_name (retval, call);
1642 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
1643 gimple_call_set_lhs (call, build_simple_mem_ref (retval));
1644 else
1645 gimple_call_set_lhs (call, retval);
1647 gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1648 /* Build bndret call to obtain returned bounds. */
1649 if (retbnd)
1650 chkp_insert_retbnd_call (retbnd, retval, &gsi);
1651 if (tsan_func_exit_call)
1652 gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
1653 ret = gimple_build_return (retval);
1654 gsi_insert_after (&gsi, ret, GSI_NEW_STMT);
1657 free_dominance_info (CDI_DOMINATORS);
1658 free_dominance_info (CDI_POST_DOMINATORS);
1659 compute_inline_parameters (node, true);
1662 /* Execute function splitting pass. */
1664 static unsigned int
1665 execute_split_functions (void)
1667 gimple_stmt_iterator bsi;
1668 basic_block bb;
1669 int overall_time = 0, overall_size = 0;
1670 int todo = 0;
1671 struct cgraph_node *node = cgraph_node::get (current_function_decl);
1673 if (flags_from_decl_or_type (current_function_decl)
1674 & (ECF_NORETURN|ECF_MALLOC))
1676 if (dump_file)
1677 fprintf (dump_file, "Not splitting: noreturn/malloc function.\n");
1678 return 0;
1680 if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
1682 if (dump_file)
1683 fprintf (dump_file, "Not splitting: main function.\n");
1684 return 0;
1686 /* This can be relaxed; function might become inlinable after splitting
1687 away the uninlinable part. */
1688 if (inline_edge_summary_vec.exists ()
1689 && !inline_summaries->get (node)->inlinable)
1691 if (dump_file)
1692 fprintf (dump_file, "Not splitting: not inlinable.\n");
1693 return 0;
1695 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
1697 if (dump_file)
1698 fprintf (dump_file, "Not splitting: disregarding inline limits.\n");
1699 return 0;
1701 /* This can be relaxed; most of the versioning tests actually prevent
1702 a duplication. */
1703 if (!tree_versionable_function_p (current_function_decl))
1705 if (dump_file)
1706 fprintf (dump_file, "Not splitting: not versionable.\n");
1707 return 0;
1709 /* FIXME: we could support this. */
1710 if (DECL_STRUCT_FUNCTION (current_function_decl)->static_chain_decl)
1712 if (dump_file)
1713 fprintf (dump_file, "Not splitting: nested function.\n");
1714 return 0;
1717 /* See if it makes sense to try to split.
1718 It makes sense to split if we inline, that is if we have direct calls to
1719 handle or direct calls are possibly going to appear as result of indirect
1720 inlining or LTO. Also handle -fprofile-generate as LTO to allow non-LTO
1721 training for LTO -fprofile-use build.
1723 Note that we are not completely conservative about disqualifying functions
1724 called once. It is possible that the caller is called more than once and
1725 then inlining would still benefit. */
1726 if ((!node->callers
1727 /* Local functions called once will be completely inlined most of the time. */
1728 || (!node->callers->next_caller && node->local.local))
1729 && !node->address_taken
1730 && !node->has_aliases_p ()
1731 && (!flag_lto || !node->externally_visible))
1733 if (dump_file)
1734 fprintf (dump_file, "Not splitting: not called directly "
1735 "or called once.\n");
1736 return 0;
1739 /* FIXME: We can actually split if splitting reduces call overhead. */
1740 if (!flag_inline_small_functions
1741 && !DECL_DECLARED_INLINE_P (current_function_decl))
1743 if (dump_file)
1744 fprintf (dump_file, "Not splitting: not autoinlining and function"
1745 " is not inline.\n");
1746 return 0;
1749 /* We enforce splitting after loop headers when profile info is not
1750 available. */
1751 if (profile_status_for_fn (cfun) != PROFILE_READ)
1752 mark_dfs_back_edges ();
1754 /* Initialize bitmap to track forbidden calls. */
1755 forbidden_dominators = BITMAP_ALLOC (NULL);
1756 calculate_dominance_info (CDI_DOMINATORS);
1758 /* Compute local info about basic blocks and determine function size/time. */
1759 bb_info_vec.safe_grow_cleared (last_basic_block_for_fn (cfun) + 1);
1760 memset (&best_split_point, 0, sizeof (best_split_point));
1761 basic_block return_bb = find_return_bb ();
1762 int tsan_exit_found = -1;
1763 FOR_EACH_BB_FN (bb, cfun)
1765 int time = 0;
1766 int size = 0;
1767 int freq = compute_call_stmt_bb_frequency (current_function_decl, bb);
1769 if (dump_file && (dump_flags & TDF_DETAILS))
1770 fprintf (dump_file, "Basic block %i\n", bb->index);
1772 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
1774 int this_time, this_size;
1775 gimple *stmt = gsi_stmt (bsi);
1777 this_size = estimate_num_insns (stmt, &eni_size_weights);
1778 this_time = estimate_num_insns (stmt, &eni_time_weights) * freq;
1779 size += this_size;
1780 time += this_time;
1781 check_forbidden_calls (stmt);
1783 if (dump_file && (dump_flags & TDF_DETAILS))
1785 fprintf (dump_file, " freq:%6i size:%3i time:%3i ",
1786 freq, this_size, this_time);
1787 print_gimple_stmt (dump_file, stmt, 0, 0);
1790 if ((flag_sanitize & SANITIZE_THREAD)
1791 && is_gimple_call (stmt)
1792 && gimple_call_internal_p (stmt)
1793 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1795 /* We handle TSAN_FUNC_EXIT for splitting either in the
1796 return_bb, or in its immediate predecessors. */
1797 if ((bb != return_bb && !find_edge (bb, return_bb))
1798 || (tsan_exit_found != -1
1799 && tsan_exit_found != (bb != return_bb)))
1801 if (dump_file)
1802 fprintf (dump_file, "Not splitting: TSAN_FUNC_EXIT"
1803 " in unexpected basic block.\n");
1804 BITMAP_FREE (forbidden_dominators);
1805 bb_info_vec.release ();
1806 return 0;
1808 tsan_exit_found = bb != return_bb;
1811 overall_time += time;
1812 overall_size += size;
1813 bb_info_vec[bb->index].time = time;
1814 bb_info_vec[bb->index].size = size;
1816 find_split_points (return_bb, overall_time, overall_size);
1817 if (best_split_point.split_bbs)
1819 split_function (return_bb, &best_split_point, tsan_exit_found == 1);
1820 BITMAP_FREE (best_split_point.ssa_names_to_pass);
1821 BITMAP_FREE (best_split_point.split_bbs);
1822 todo = TODO_update_ssa | TODO_cleanup_cfg;
1824 BITMAP_FREE (forbidden_dominators);
1825 bb_info_vec.release ();
1826 return todo;
1829 namespace {
1831 const pass_data pass_data_split_functions =
1833 GIMPLE_PASS, /* type */
1834 "fnsplit", /* name */
1835 OPTGROUP_NONE, /* optinfo_flags */
1836 TV_IPA_FNSPLIT, /* tv_id */
1837 PROP_cfg, /* properties_required */
1838 0, /* properties_provided */
1839 0, /* properties_destroyed */
1840 0, /* todo_flags_start */
1841 0, /* todo_flags_finish */
1844 class pass_split_functions : public gimple_opt_pass
1846 public:
1847 pass_split_functions (gcc::context *ctxt)
1848 : gimple_opt_pass (pass_data_split_functions, ctxt)
1851 /* opt_pass methods: */
1852 virtual bool gate (function *);
1853 virtual unsigned int execute (function *)
1855 return execute_split_functions ();
1858 }; // class pass_split_functions
1860 bool
1861 pass_split_functions::gate (function *)
1863 /* When doing profile feedback, we want to execute the pass after profiling
1864 is read. So disable this one in early optimization. */
1865 return (flag_partial_inlining
1866 && !profile_arc_flag && !flag_branch_probabilities);
1869 } // anon namespace
1871 gimple_opt_pass *
1872 make_pass_split_functions (gcc::context *ctxt)
1874 return new pass_split_functions (ctxt);
1877 /* Execute function splitting pass. */
1879 static unsigned int
1880 execute_feedback_split_functions (void)
1882 unsigned int retval = execute_split_functions ();
1883 if (retval)
1884 retval |= TODO_rebuild_cgraph_edges;
1885 return retval;
1888 namespace {
1890 const pass_data pass_data_feedback_split_functions =
1892 GIMPLE_PASS, /* type */
1893 "feedback_fnsplit", /* name */
1894 OPTGROUP_NONE, /* optinfo_flags */
1895 TV_IPA_FNSPLIT, /* tv_id */
1896 PROP_cfg, /* properties_required */
1897 0, /* properties_provided */
1898 0, /* properties_destroyed */
1899 0, /* todo_flags_start */
1900 0, /* todo_flags_finish */
1903 class pass_feedback_split_functions : public gimple_opt_pass
1905 public:
1906 pass_feedback_split_functions (gcc::context *ctxt)
1907 : gimple_opt_pass (pass_data_feedback_split_functions, ctxt)
1910 /* opt_pass methods: */
1911 virtual bool gate (function *);
1912 virtual unsigned int execute (function *)
1914 return execute_feedback_split_functions ();
1917 }; // class pass_feedback_split_functions
1919 bool
1920 pass_feedback_split_functions::gate (function *)
1922 /* We don't need to split when profiling at all; we are producing
1923 lousy code anyway. */
1924 return (flag_partial_inlining
1925 && flag_branch_probabilities);
1928 } // anon namespace
1930 gimple_opt_pass *
1931 make_pass_feedback_split_functions (gcc::context *ctxt)
1933 return new pass_feedback_split_functions (ctxt);