1 /* Loop manipulation code for GNU compiler.
2 Copyright (C) 2002-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "predict.h"
26 #include "symtab.h"
27 #include "hard-reg-set.h"
28 #include "function.h"
29 #include "dominance.h"
30 #include "cfg.h"
31 #include "cfganal.h"
32 #include "basic-block.h"
33 #include "cfgloop.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-expr.h"
39 #include "gimple.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "tree-ssa-loop-manip.h"
43 #include "dumpfile.h"
45 static void copy_loops_to (struct loop **, int,
46 struct loop *);
47 static void loop_redirect_edge (edge, basic_block);
48 static void remove_bbs (basic_block *, int);
49 static bool rpe_enum_p (const_basic_block, const void *);
50 static int find_path (edge, basic_block **);
51 static void fix_loop_placements (struct loop *, bool *);
52 static bool fix_bb_placement (basic_block);
53 static void fix_bb_placements (basic_block, bool *, bitmap);
55 /* Checks whether basic block BB is dominated by DATA. */
56 static bool
57 rpe_enum_p (const_basic_block bb, const void *data)
59 return dominated_by_p (CDI_DOMINATORS, bb, (const_basic_block) data);
62 /* Remove basic blocks BBS. NBBS is the number of the basic blocks. */
64 static void
65 remove_bbs (basic_block *bbs, int nbbs)
67 int i;
69 for (i = 0; i < nbbs; i++)
70 delete_basic_block (bbs[i]);
73 /* Find the path -- i.e. the basic blocks dominated by edge E -- and put them
74 into array BBS, which will be allocated large enough to contain them.
75 E->dest must have exactly one predecessor for this to work (this is
76 easy to achieve, and we do not do it here because we do not want this
77 function to alter anything). The number of basic blocks in the
78 path is returned. */
79 static int
80 find_path (edge e, basic_block **bbs)
82 gcc_assert (EDGE_COUNT (e->dest->preds) <= 1);
84 /* Find bbs in the path. */
85 *bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
86 return dfs_enumerate_from (e->dest, 0, rpe_enum_p, *bbs,
87 n_basic_blocks_for_fn (cfun), e->dest);
90 /* Fix placement of basic block BB inside the loop hierarchy --
91 let L be the loop to which BB belongs. Then every successor of BB must either
92 1) belong to some superloop of loop L, or
93 2) be a header of a loop K such that K->outer is a superloop of L.
94 Returns true if we had to move BB into another loop to enforce this condition,
95 false if the placement of BB was already correct (provided that placements
96 of its successors are correct). */
97 static bool
98 fix_bb_placement (basic_block bb)
100 edge e;
101 edge_iterator ei;
102 struct loop *loop = current_loops->tree_root, *act;
104 FOR_EACH_EDGE (e, ei, bb->succs)
106 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
107 continue;
109 act = e->dest->loop_father;
110 if (act->header == e->dest)
111 act = loop_outer (act);
113 if (flow_loop_nested_p (loop, act))
114 loop = act;
117 if (loop == bb->loop_father)
118 return false;
120 remove_bb_from_loops (bb);
121 add_bb_to_loop (bb, loop);
123 return true;
126 /* Fix placement of LOOP inside the loop tree, i.e. find the innermost superloop
127 of LOOP into which at least one exit edge of LOOP leads, and set it
128 as the immediate superloop of LOOP. Return true if the immediate superloop
129 of LOOP changed.
131 IRRED_INVALIDATED is set to true if a change in the loop structures might
132 invalidate the information about irreducible regions. */
134 static bool
135 fix_loop_placement (struct loop *loop, bool *irred_invalidated)
137 unsigned i;
138 edge e;
139 vec<edge> exits = get_loop_exit_edges (loop);
140 struct loop *father = current_loops->tree_root, *act;
141 bool ret = false;
143 FOR_EACH_VEC_ELT (exits, i, e)
145 act = find_common_loop (loop, e->dest->loop_father);
146 if (flow_loop_nested_p (father, act))
147 father = act;
150 if (father != loop_outer (loop))
152 for (act = loop_outer (loop); act != father; act = loop_outer (act))
153 act->num_nodes -= loop->num_nodes;
154 flow_loop_tree_node_remove (loop);
155 flow_loop_tree_node_add (father, loop);
157 /* The exit edges of LOOP no longer exit its original immediate
158 superloops; remove them from the appropriate exit lists. */
159 FOR_EACH_VEC_ELT (exits, i, e)
161 /* We may need to recompute irreducible loops. */
162 if (e->flags & EDGE_IRREDUCIBLE_LOOP)
163 *irred_invalidated = true;
164 rescan_loop_exit (e, false, false);
167 ret = true;
170 exits.release ();
171 return ret;
174 /* Fix placements of basic blocks inside the loop hierarchy stored in loops; i.e.
175 enforce the condition stated in the description of fix_bb_placement. We
176 start from basic block FROM that had some of its successors removed, so that
177 its placement may no longer be correct, and iteratively fix the placement of
178 its predecessors, which may change if the placement of FROM changed. Also fix
179 the placement of subloops of FROM->loop_father, which might also be altered due
180 to this change; the condition for them is similar, except that instead of
181 successors we consider edges coming out of the loops.
183 If the changes may invalidate the information about irreducible regions,
184 IRRED_INVALIDATED is set to true.
186 If LOOP_CLOSED_SSA_INVALIDATED is non-NULL then all basic blocks with
187 changed loop_father are collected there. */
189 static void
190 fix_bb_placements (basic_block from,
191 bool *irred_invalidated,
192 bitmap loop_closed_ssa_invalidated)
194 sbitmap in_queue;
195 basic_block *queue, *qtop, *qbeg, *qend;
196 struct loop *base_loop, *target_loop;
197 edge e;
199 /* We pass through blocks back-reachable from FROM, testing whether some
200 of their successors moved to an outer loop. It may be necessary to
201 iterate several times, but the process is finite, as a block is only
202 reprocessed when it moves up the loop structure. The whole story is a bit
203 more complicated due to the presence of subloops, which are moved using
204 fix_loop_placement. */
206 base_loop = from->loop_father;
207 /* If we are already in the outermost loop, the basic blocks cannot be moved
208 outside of it. If FROM is the header of the base loop, it cannot be moved
209 outside of it, either. In both cases, we can end now. */
210 if (base_loop == current_loops->tree_root
211 || from == base_loop->header)
212 return;
214 in_queue = sbitmap_alloc (last_basic_block_for_fn (cfun));
215 bitmap_clear (in_queue);
216 bitmap_set_bit (in_queue, from->index);
217 /* Prevent us from going out of the base_loop. */
218 bitmap_set_bit (in_queue, base_loop->header->index);
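/* QUEUE is used as a circular work list over the blocks of BASE_LOOP;
   QBEG and QEND wrap around at QTOP.  */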
220 queue = XNEWVEC (basic_block, base_loop->num_nodes + 1);
221 qtop = queue + base_loop->num_nodes + 1;
222 qbeg = queue;
223 qend = queue + 1;
224 *qbeg = from;
226 while (qbeg != qend)
228 edge_iterator ei;
229 from = *qbeg;
230 qbeg++;
231 if (qbeg == qtop)
232 qbeg = queue;
233 bitmap_clear_bit (in_queue, from->index);
235 if (from->loop_father->header == from)
237 /* Subloop header, maybe move the loop upward. */
238 if (!fix_loop_placement (from->loop_father, irred_invalidated))
239 continue;
240 target_loop = loop_outer (from->loop_father);
241 if (loop_closed_ssa_invalidated)
243 basic_block *bbs = get_loop_body (from->loop_father);
244 for (unsigned i = 0; i < from->loop_father->num_nodes; ++i)
245 bitmap_set_bit (loop_closed_ssa_invalidated, bbs[i]->index);
246 free (bbs);
249 else
251 /* Ordinary basic block. */
252 if (!fix_bb_placement (from))
253 continue;
254 target_loop = from->loop_father;
255 if (loop_closed_ssa_invalidated)
256 bitmap_set_bit (loop_closed_ssa_invalidated, from->index);
259 FOR_EACH_EDGE (e, ei, from->succs)
261 if (e->flags & EDGE_IRREDUCIBLE_LOOP)
262 *irred_invalidated = true;
265 /* Something has changed, insert predecessors into queue. */
266 FOR_EACH_EDGE (e, ei, from->preds)
268 basic_block pred = e->src;
269 struct loop *nca;
271 if (e->flags & EDGE_IRREDUCIBLE_LOOP)
272 *irred_invalidated = true;
274 if (bitmap_bit_p (in_queue, pred->index))
275 continue;
277 /* If it is a subloop, then either it was not moved, or
278 the path up the loop tree from base_loop does not contain
279 it. */
280 nca = find_common_loop (pred->loop_father, base_loop);
281 if (pred->loop_father != base_loop
282 && (nca == base_loop
283 || nca != pred->loop_father))
284 pred = pred->loop_father->header;
285 else if (!flow_loop_nested_p (target_loop, pred->loop_father))
287 /* If PRED is already higher in the loop hierarchy than the
288 TARGET_LOOP to that we moved FROM, the change of the position
289 of FROM does not affect the position of PRED, so there is no
290 point in processing it. */
291 continue;
294 if (bitmap_bit_p (in_queue, pred->index))
295 continue;
297 /* Schedule the basic block. */
298 *qend = pred;
299 qend++;
300 if (qend == qtop)
301 qend = queue;
302 bitmap_set_bit (in_queue, pred->index);
305 free (in_queue);
306 free (queue);
309 /* Removes path beginning at edge E, i.e. remove basic blocks dominated by E
310 and update loop structures and dominators. Return true if we were able
311 to remove the path, false otherwise (and nothing is affected then). */
312 bool
313 remove_path (edge e)
315 edge ae;
316 basic_block *rem_bbs, *bord_bbs, from, bb;
317 vec<basic_block> dom_bbs;
318 int i, nrem, n_bord_bbs;
319 sbitmap seen;
320 bool irred_invalidated = false;
321 edge_iterator ei;
322 struct loop *l, *f;
324 if (!can_remove_branch_p (e))
325 return false;
327 /* Keep track of whether we need to update information about irreducible
328 regions. This is the case if the removed area is a part of the
329 irreducible region, or if the set of basic blocks that belong to a loop
330 that is inside an irreducible region is changed, or if such a loop is
331 removed. */
332 if (e->flags & EDGE_IRREDUCIBLE_LOOP)
333 irred_invalidated = true;
335 /* We need to check whether basic blocks are dominated by the edge
336 E, but we only have basic block dominators. This is easy to
337 fix -- when e->dest has exactly one predecessor, this corresponds
338 to blocks dominated by e->dest; if not, we split the edge. */
339 if (!single_pred_p (e->dest))
340 e = single_pred_edge (split_edge (e));
342 /* It may happen that by removing the path we remove one or more of the
343 loops we belong to. In this case, first unloop those loops, then proceed
344 normally. We may assume that e->dest is not a header of any loop,
345 as it now has exactly one predecessor. */
346 for (l = e->src->loop_father; loop_outer (l); l = f)
348 f = loop_outer (l);
349 if (dominated_by_p (CDI_DOMINATORS, l->latch, e->dest))
350 unloop (l, &irred_invalidated, NULL);
353 /* Identify the path. */
354 nrem = find_path (e, &rem_bbs);
356 n_bord_bbs = 0;
357 bord_bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
358 seen = sbitmap_alloc (last_basic_block_for_fn (cfun));
359 bitmap_clear (seen);
361 /* Find "border" blocks -- i.e. those with a predecessor in the removed path. */
362 for (i = 0; i < nrem; i++)
363 bitmap_set_bit (seen, rem_bbs[i]->index);
364 if (!irred_invalidated)
365 FOR_EACH_EDGE (ae, ei, e->src->succs)
366 if (ae != e && ae->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
367 && !bitmap_bit_p (seen, ae->dest->index)
368 && ae->flags & EDGE_IRREDUCIBLE_LOOP)
370 irred_invalidated = true;
371 break;
374 for (i = 0; i < nrem; i++)
376 bb = rem_bbs[i];
377 FOR_EACH_EDGE (ae, ei, rem_bbs[i]->succs)
378 if (ae->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
379 && !bitmap_bit_p (seen, ae->dest->index))
381 bitmap_set_bit (seen, ae->dest->index);
382 bord_bbs[n_bord_bbs++] = ae->dest;
384 if (ae->flags & EDGE_IRREDUCIBLE_LOOP)
385 irred_invalidated = true;
389 /* Remove the path. */
390 from = e->src;
391 remove_branch (e);
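/* E is no longer valid after remove_branch; FROM, recorded above, keeps
   its former source block for the placement fix-ups done below.  */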
392 dom_bbs.create (0);
394 /* Cancel loops contained in the path. */
395 for (i = 0; i < nrem; i++)
396 if (rem_bbs[i]->loop_father->header == rem_bbs[i])
397 cancel_loop_tree (rem_bbs[i]->loop_father);
399 remove_bbs (rem_bbs, nrem);
400 free (rem_bbs);
402 /* Find blocks whose dominators may be affected. */
403 bitmap_clear (seen);
404 for (i = 0; i < n_bord_bbs; i++)
406 basic_block ldom;
408 bb = get_immediate_dominator (CDI_DOMINATORS, bord_bbs[i]);
409 if (bitmap_bit_p (seen, bb->index))
410 continue;
411 bitmap_set_bit (seen, bb->index);
413 for (ldom = first_dom_son (CDI_DOMINATORS, bb);
414 ldom;
415 ldom = next_dom_son (CDI_DOMINATORS, ldom))
416 if (!dominated_by_p (CDI_DOMINATORS, from, ldom))
417 dom_bbs.safe_push (ldom);
420 free (seen);
422 /* Recount dominators. */
423 iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, true);
424 dom_bbs.release ();
425 free (bord_bbs);
427 /* Fix placements of basic blocks inside loops and the placement of
428 loops in the loop tree. */
429 fix_bb_placements (from, &irred_invalidated, NULL);
430 fix_loop_placements (from->loop_father, &irred_invalidated);
432 if (irred_invalidated
433 && loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS))
434 mark_irreducible_loops ();
436 return true;
439 /* Creates place for a new LOOP in loops structure of FN. */
441 void
442 place_new_loop (struct function *fn, struct loop *loop)
444 loop->num = number_of_loops (fn);
445 vec_safe_push (loops_for_fn (fn)->larray, loop);
448 /* Given a LOOP structure with filled header and latch, find the body of the
449 corresponding loop and add it to the loop tree. Insert LOOP as a child of
450 OUTER. */
452 void
453 add_loop (struct loop *loop, struct loop *outer)
455 basic_block *bbs;
456 int i, n;
457 struct loop *subloop;
458 edge e;
459 edge_iterator ei;
461 /* Add it to loop structure. */
462 place_new_loop (cfun, loop);
463 flow_loop_tree_node_add (outer, loop);
465 /* Find its nodes. */
466 bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
467 n = get_loop_body_with_size (loop, bbs, n_basic_blocks_for_fn (cfun));
469 for (i = 0; i < n; i++)
471 if (bbs[i]->loop_father == outer)
473 remove_bb_from_loops (bbs[i]);
474 add_bb_to_loop (bbs[i], loop);
475 continue;
478 loop->num_nodes++;
480 /* If we find a direct subloop of OUTER, move it to LOOP. */
481 subloop = bbs[i]->loop_father;
482 if (loop_outer (subloop) == outer
483 && subloop->header == bbs[i])
485 flow_loop_tree_node_remove (subloop);
486 flow_loop_tree_node_add (loop, subloop);
490 /* Update the information about loop exit edges. */
491 for (i = 0; i < n; i++)
493 FOR_EACH_EDGE (e, ei, bbs[i]->succs)
495 rescan_loop_exit (e, false, false);
499 free (bbs);
502 /* Multiply all frequencies in LOOP by NUM/DEN. */
504 void
505 scale_loop_frequencies (struct loop *loop, int num, int den)
507 basic_block *bbs;
509 bbs = get_loop_body (loop);
510 scale_bbs_frequencies_int (bbs, loop->num_nodes, num, den);
511 free (bbs);
514 /* Multiply all frequencies in LOOP by SCALE/REG_BR_PROB_BASE.
515 If ITERATION_BOUND is non-zero, scale even further if the loop is predicted
516 to iterate too many times. */
518 void
519 scale_loop_profile (struct loop *loop, int scale, gcov_type iteration_bound)
521 gcov_type iterations = expected_loop_iterations_unbounded (loop);
522 edge e;
523 edge_iterator ei;
525 if (dump_file && (dump_flags & TDF_DETAILS))
526 fprintf (dump_file, ";; Scaling loop %i with scale %f, "
527 "bounding iterations to %i from guessed %i\n",
528 loop->num, (double)scale / REG_BR_PROB_BASE,
529 (int)iteration_bound, (int)iterations);
531 /* See if loop is predicted to iterate too many times. */
532 if (iteration_bound && iterations > 0
533 && apply_probability (iterations, scale) > iteration_bound)
535 /* Fixing the loop profile for a different trip count is not trivial; the exit
536 probabilities have to be updated to match, and the frequencies propagated down
537 to the loop body.
539 We fully update only the simple case of a loop with a single exit that
540 leaves either from the latch or from the BB just before the latch, and comes
541 from a BB with a simple conditional jump. This is OK for use in the vectorizer. */
542 e = single_exit (loop);
543 if (e)
545 edge other_e;
546 int freq_delta;
547 gcov_type count_delta;
549 FOR_EACH_EDGE (other_e, ei, e->src->succs)
550 if (!(other_e->flags & (EDGE_ABNORMAL | EDGE_FAKE))
551 && e != other_e)
552 break;
554 /* Probability of exit must be 1/iterations. */
555 freq_delta = EDGE_FREQUENCY (e);
556 e->probability = REG_BR_PROB_BASE / iteration_bound;
557 other_e->probability = inverse_probability (e->probability);
558 freq_delta -= EDGE_FREQUENCY (e);
560 /* Adjust counts accordingly. */
561 count_delta = e->count;
562 e->count = apply_probability (e->src->count, e->probability);
563 other_e->count = apply_probability (e->src->count, other_e->probability);
564 count_delta -= e->count;
566 /* If latch exists, change its frequency and count, since we changed
567 probability of exit. Theoretically we should update everything from
568 source of exit edge to latch, but for vectorizer this is enough. */
569 if (loop->latch
570 && loop->latch != e->src)
572 loop->latch->frequency += freq_delta;
573 if (loop->latch->frequency < 0)
574 loop->latch->frequency = 0;
575 loop->latch->count += count_delta;
576 if (loop->latch->count < 0)
577 loop->latch->count = 0;
581 /* Roughly speaking we want to reduce the loop body profile by the
582 difference of loop iterations. We can however do better if
583 we look at the actual profile, if it is available. */
584 scale = RDIV (iteration_bound * scale, iterations);
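/* If real profile counts or frequencies are available for the loop header,
   derive the scale from them instead of the guessed iteration count.  */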
585 if (loop->header->count)
587 gcov_type count_in = 0;
589 FOR_EACH_EDGE (e, ei, loop->header->preds)
590 if (e->src != loop->latch)
591 count_in += e->count;
593 if (count_in != 0)
594 scale = GCOV_COMPUTE_SCALE (count_in * iteration_bound,
595 loop->header->count);
597 else if (loop->header->frequency)
599 int freq_in = 0;
601 FOR_EACH_EDGE (e, ei, loop->header->preds)
602 if (e->src != loop->latch)
603 freq_in += EDGE_FREQUENCY (e);
605 if (freq_in != 0)
606 scale = GCOV_COMPUTE_SCALE (freq_in * iteration_bound,
607 loop->header->frequency);
609 if (!scale)
610 scale = 1;
613 if (scale == REG_BR_PROB_BASE)
614 return;
616 /* Scale the actual probabilities. */
617 scale_loop_frequencies (loop, scale, REG_BR_PROB_BASE);
618 if (dump_file && (dump_flags & TDF_DETAILS))
619 fprintf (dump_file, ";; guessed iterations are now %i\n",
620 (int)expected_loop_iterations_unbounded (loop));
623 /* Recompute dominance information for basic blocks outside LOOP. */
625 static void
626 update_dominators_in_loop (struct loop *loop)
628 vec<basic_block> dom_bbs = vNULL;
629 sbitmap seen;
630 basic_block *body;
631 unsigned i;
633 seen = sbitmap_alloc (last_basic_block_for_fn (cfun));
634 bitmap_clear (seen);
635 body = get_loop_body (loop);
637 for (i = 0; i < loop->num_nodes; i++)
638 bitmap_set_bit (seen, body[i]->index);
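/* Blocks outside the loop whose immediate dominator lies inside it may need
   their dominators recomputed; collect them into DOM_BBS.  */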
640 for (i = 0; i < loop->num_nodes; i++)
642 basic_block ldom;
644 for (ldom = first_dom_son (CDI_DOMINATORS, body[i]);
645 ldom;
646 ldom = next_dom_son (CDI_DOMINATORS, ldom))
647 if (!bitmap_bit_p (seen, ldom->index))
649 bitmap_set_bit (seen, ldom->index);
650 dom_bbs.safe_push (ldom);
654 iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, false);
655 free (body);
656 free (seen);
657 dom_bbs.release ();
660 /* Creates an if region as shown below. CONDITION is used to create
661 the test for the if.
664 | ------------- -------------
665 | | pred_bb | | pred_bb |
666 | ------------- -------------
667 | | |
668 | | | ENTRY_EDGE
669 | | ENTRY_EDGE V
670 | | ====> -------------
671 | | | cond_bb |
672 | | | CONDITION |
673 | | -------------
674 | V / \
675 | ------------- e_false / \ e_true
676 | | succ_bb | V V
677 | ------------- ----------- -----------
678 | | false_bb | | true_bb |
679 | ----------- -----------
680 | \ /
681 | \ /
682 | V V
683 | -------------
684 | | join_bb |
685 | -------------
686 | | exit_edge (result)
688 | -----------
689 | | succ_bb |
690 | -----------
694 edge
695 create_empty_if_region_on_edge (edge entry_edge, tree condition)
698 basic_block cond_bb, true_bb, false_bb, join_bb;
699 edge e_true, e_false, exit_edge;
700 gcond *cond_stmt;
701 tree simple_cond;
702 gimple_stmt_iterator gsi;
704 cond_bb = split_edge (entry_edge);
706 /* Insert condition in cond_bb. */
707 gsi = gsi_last_bb (cond_bb);
708 simple_cond =
709 force_gimple_operand_gsi (&gsi, condition, true, NULL,
710 false, GSI_NEW_STMT);
711 cond_stmt = gimple_build_cond_from_tree (simple_cond, NULL_TREE, NULL_TREE);
712 gsi = gsi_last_bb (cond_bb);
713 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
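/* Build the diamond: split the single successor edge of COND_BB twice to
   create JOIN_BB and TRUE_BB, then add the false arm through FALSE_BB.  */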
715 join_bb = split_edge (single_succ_edge (cond_bb));
717 e_true = single_succ_edge (cond_bb);
718 true_bb = split_edge (e_true);
720 e_false = make_edge (cond_bb, join_bb, 0);
721 false_bb = split_edge (e_false);
723 e_true->flags &= ~EDGE_FALLTHRU;
724 e_true->flags |= EDGE_TRUE_VALUE;
725 e_false->flags &= ~EDGE_FALLTHRU;
726 e_false->flags |= EDGE_FALSE_VALUE;
728 set_immediate_dominator (CDI_DOMINATORS, cond_bb, entry_edge->src);
729 set_immediate_dominator (CDI_DOMINATORS, true_bb, cond_bb);
730 set_immediate_dominator (CDI_DOMINATORS, false_bb, cond_bb);
731 set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
733 exit_edge = single_succ_edge (join_bb);
735 if (single_pred_p (exit_edge->dest))
736 set_immediate_dominator (CDI_DOMINATORS, exit_edge->dest, join_bb);
738 return exit_edge;
741 /* create_empty_loop_on_edge
743 | - pred_bb - ------ pred_bb ------
744 | | | | iv0 = initial_value |
745 | -----|----- ---------|-----------
746 | | ______ | entry_edge
747 | | entry_edge / | |
748 | | ====> | -V---V- loop_header -------------
749 | V | | iv_before = phi (iv0, iv_after) |
750 | - succ_bb - | ---|-----------------------------
751 | | | | |
752 | ----------- | ---V--- loop_body ---------------
753 | | | iv_after = iv_before + stride |
754 | | | if (iv_before < upper_bound) |
755 | | ---|--------------\--------------
756 | | | \ exit_e
757 | | V \
758 | | - loop_latch - V- succ_bb -
759 | | | | | |
760 | | /------------- -----------
761 | \ ___ /
763 Creates an empty loop as shown above. IV_BEFORE is the SSA_NAME
764 that holds the value of the IV before the increment. IV_BEFORE should be used
765 for adding code to the body that uses the IV. OUTER is the outer loop in
766 which the new loop should be inserted.
768 Both INITIAL_VALUE and UPPER_BOUND expressions are gimplified and
769 inserted on the loop entry edge. This implies that this function
770 should be used only when the UPPER_BOUND expression is a loop
771 invariant. */
773 struct loop *
774 create_empty_loop_on_edge (edge entry_edge,
775 tree initial_value,
776 tree stride, tree upper_bound,
777 tree iv,
778 tree *iv_before,
779 tree *iv_after,
780 struct loop *outer)
782 basic_block loop_header, loop_latch, succ_bb, pred_bb;
783 struct loop *loop;
784 gimple_stmt_iterator gsi;
785 gimple_seq stmts;
786 gcond *cond_expr;
787 tree exit_test;
788 edge exit_e;
789 int prob;
791 gcc_assert (entry_edge && initial_value && stride && upper_bound && iv);
793 /* Create header, latch and wire up the loop. */
794 pred_bb = entry_edge->src;
795 loop_header = split_edge (entry_edge);
796 loop_latch = split_edge (single_succ_edge (loop_header));
797 succ_bb = single_succ (loop_latch);
798 make_edge (loop_header, succ_bb, 0);
799 redirect_edge_succ_nodup (single_succ_edge (loop_latch), loop_header);
801 /* Set immediate dominator information. */
802 set_immediate_dominator (CDI_DOMINATORS, loop_header, pred_bb);
803 set_immediate_dominator (CDI_DOMINATORS, loop_latch, loop_header);
804 set_immediate_dominator (CDI_DOMINATORS, succ_bb, loop_header);
806 /* Initialize a loop structure and put it in a loop hierarchy. */
807 loop = alloc_loop ();
808 loop->header = loop_header;
809 loop->latch = loop_latch;
810 add_loop (loop, outer);
812 /* TODO: Fix frequencies and counts. */
813 prob = REG_BR_PROB_BASE / 2;
815 scale_loop_frequencies (loop, REG_BR_PROB_BASE - prob, REG_BR_PROB_BASE);
817 /* Update dominators. */
818 update_dominators_in_loop (loop);
820 /* Modify edge flags. */
821 exit_e = single_exit (loop);
822 exit_e->flags = EDGE_LOOP_EXIT | EDGE_FALSE_VALUE;
823 single_pred_edge (loop_latch)->flags = EDGE_TRUE_VALUE;
825 /* Construct IV code in loop. */
826 initial_value = force_gimple_operand (initial_value, &stmts, true, iv);
827 if (stmts)
829 gsi_insert_seq_on_edge (loop_preheader_edge (loop), stmts);
830 gsi_commit_edge_inserts ();
833 upper_bound = force_gimple_operand (upper_bound, &stmts, true, NULL);
834 if (stmts)
836 gsi_insert_seq_on_edge (loop_preheader_edge (loop), stmts);
837 gsi_commit_edge_inserts ();
840 gsi = gsi_last_bb (loop_header);
841 create_iv (initial_value, stride, iv, loop, &gsi, false,
842 iv_before, iv_after);
844 /* Insert loop exit condition. */
845 cond_expr = gimple_build_cond
846 (LT_EXPR, *iv_before, upper_bound, NULL_TREE, NULL_TREE);
848 exit_test = gimple_cond_lhs (cond_expr);
849 exit_test = force_gimple_operand_gsi (&gsi, exit_test, true, NULL,
850 false, GSI_NEW_STMT);
851 gimple_cond_set_lhs (cond_expr, exit_test);
852 gsi = gsi_last_bb (exit_e->src);
853 gsi_insert_after (&gsi, cond_expr, GSI_NEW_STMT);
855 split_block_after_labels (loop_header);
857 return loop;
860 /* Make the area between HEADER_EDGE and LATCH_EDGE a loop by connecting
861 the latch to the header and updating the loop tree and dominators
862 accordingly. Everything between them plus LATCH_EDGE destination must
863 be dominated by HEADER_EDGE destination, and back-reachable from
864 LATCH_EDGE source. HEADER_EDGE is redirected to basic block SWITCH_BB,
865 FALSE_EDGE of SWITCH_BB to the original destination of HEADER_EDGE and
866 TRUE_EDGE of SWITCH_BB to the original destination of LATCH_EDGE.
867 Returns the newly created loop. Frequencies and counts in the new loop
868 are scaled by FALSE_SCALE and in the old one by TRUE_SCALE. */
870 struct loop *
871 loopify (edge latch_edge, edge header_edge,
872 basic_block switch_bb, edge true_edge, edge false_edge,
873 bool redirect_all_edges, unsigned true_scale, unsigned false_scale)
875 basic_block succ_bb = latch_edge->dest;
876 basic_block pred_bb = header_edge->src;
877 struct loop *loop = alloc_loop ();
878 struct loop *outer = loop_outer (succ_bb->loop_father);
879 int freq;
880 gcov_type cnt;
881 edge e;
882 edge_iterator ei;
884 loop->header = header_edge->dest;
885 loop->latch = latch_edge->src;
887 freq = EDGE_FREQUENCY (header_edge);
888 cnt = header_edge->count;
890 /* Redirect edges. */
891 loop_redirect_edge (latch_edge, loop->header);
892 loop_redirect_edge (true_edge, succ_bb);
894 /* During loop versioning, one of the switch_bb edge is already properly
895 set. Do not redirect it again unless redirect_all_edges is true. */
896 if (redirect_all_edges)
898 loop_redirect_edge (header_edge, switch_bb);
899 loop_redirect_edge (false_edge, loop->header);
901 /* Update dominators. */
902 set_immediate_dominator (CDI_DOMINATORS, switch_bb, pred_bb);
903 set_immediate_dominator (CDI_DOMINATORS, loop->header, switch_bb);
906 set_immediate_dominator (CDI_DOMINATORS, succ_bb, switch_bb);
908 /* Compute new loop. */
909 add_loop (loop, outer);
911 /* Add switch_bb to appropriate loop. */
912 if (switch_bb->loop_father)
913 remove_bb_from_loops (switch_bb);
914 add_bb_to_loop (switch_bb, outer);
916 /* Fix frequencies. */
917 if (redirect_all_edges)
919 switch_bb->frequency = freq;
920 switch_bb->count = cnt;
921 FOR_EACH_EDGE (e, ei, switch_bb->succs)
923 e->count = apply_probability (switch_bb->count, e->probability);
926 scale_loop_frequencies (loop, false_scale, REG_BR_PROB_BASE);
927 scale_loop_frequencies (succ_bb->loop_father, true_scale, REG_BR_PROB_BASE);
928 update_dominators_in_loop (loop);
930 return loop;
933 /* Remove the latch edge of a LOOP and update loops to indicate that
934 the LOOP was removed. After this function, the original loop latch will
935 have no successor, which the caller is expected to fix somehow.
937 If this may cause the information about irreducible regions to become
938 invalid, IRRED_INVALIDATED is set to true.
940 LOOP_CLOSED_SSA_INVALIDATED, if non-NULL, is a bitmap where we store
941 basic blocks that had a non-trivial update of their loop_father. */
943 void
944 unloop (struct loop *loop, bool *irred_invalidated,
945 bitmap loop_closed_ssa_invalidated)
947 basic_block *body;
948 struct loop *ploop;
949 unsigned i, n;
950 basic_block latch = loop->latch;
951 bool dummy = false;
953 if (loop_preheader_edge (loop)->flags & EDGE_IRREDUCIBLE_LOOP)
954 *irred_invalidated = true;
956 /* This is relatively straightforward. The dominators are unchanged, as
957 the loop header dominates the loop latch, so the only thing we have to care
958 about is the placement of loops and basic blocks inside the loop tree. We
959 move them all to the outer loop, and then let fix_bb_placements do
960 its work. */
962 body = get_loop_body (loop);
963 n = loop->num_nodes;
964 for (i = 0; i < n; i++)
965 if (body[i]->loop_father == loop)
967 remove_bb_from_loops (body[i]);
968 add_bb_to_loop (body[i], loop_outer (loop));
970 free (body);
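/* Reparent the subloops of LOOP to its outer loop.  */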
972 while (loop->inner)
974 ploop = loop->inner;
975 flow_loop_tree_node_remove (ploop);
976 flow_loop_tree_node_add (loop_outer (loop), ploop);
979 /* Remove the loop and free its data. */
980 delete_loop (loop);
982 remove_edge (single_succ_edge (latch));
984 /* We do not pass IRRED_INVALIDATED to fix_bb_placements here, as even if
985 there is an irreducible region inside the cancelled loop, the flags will
986 still be correct. */
987 fix_bb_placements (latch, &dummy, loop_closed_ssa_invalidated);
990 /* Fix placement of superloops of LOOP inside loop tree, i.e. ensure that
991 condition stated in description of fix_loop_placement holds for them.
992 It is used in case when we removed some edges coming out of LOOP, which
993 may cause the right placement of LOOP inside loop tree to change.
995 IRRED_INVALIDATED is set to true if a change in the loop structures might
996 invalidate the information about irreducible regions. */
998 static void
999 fix_loop_placements (struct loop *loop, bool *irred_invalidated)
1001 struct loop *outer;
1003 while (loop_outer (loop))
1005 outer = loop_outer (loop);
1006 if (!fix_loop_placement (loop, irred_invalidated))
1007 break;
1009 /* Changing the placement of a loop in the loop tree may alter the
1010 validity of condition 2) of the description of fix_bb_placement
1011 for its preheader, because the successor is the header and belongs
1012 to the loop. So call fix_bb_placements to fix up the placement
1013 of the preheader and (possibly) of its predecessors. */
1014 fix_bb_placements (loop_preheader_edge (loop)->src,
1015 irred_invalidated, NULL);
1016 loop = outer;
1020 /* Duplicate loop bounds and other information we store about
1021 the loop into its duplicate. */
1023 void
1024 copy_loop_info (struct loop *loop, struct loop *target)
1026 gcc_checking_assert (!target->any_upper_bound && !target->any_estimate);
1027 target->any_upper_bound = loop->any_upper_bound;
1028 target->nb_iterations_upper_bound = loop->nb_iterations_upper_bound;
1029 target->any_estimate = loop->any_estimate;
1030 target->nb_iterations_estimate = loop->nb_iterations_estimate;
1031 target->estimate_state = loop->estimate_state;
1032 target->warned_aggressive_loop_optimizations
1033 |= loop->warned_aggressive_loop_optimizations;
1036 /* Creates a copy of LOOP as a subloop of TARGET loop, placing the newly
1037 created loop into the loop structure. */
1038 struct loop *
1039 duplicate_loop (struct loop *loop, struct loop *target)
1041 struct loop *cloop;
1042 cloop = alloc_loop ();
1043 place_new_loop (cfun, cloop);
1045 copy_loop_info (loop, cloop);
1047 /* Mark the new loop as copy of LOOP. */
1048 set_loop_copy (loop, cloop);
1050 /* Add it to target. */
1051 flow_loop_tree_node_add (target, cloop);
1053 return cloop;
1056 /* Copies structure of subloops of LOOP into TARGET loop, placing
1057 newly created loops into loop tree. */
1058 void
1059 duplicate_subloops (struct loop *loop, struct loop *target)
1061 struct loop *aloop, *cloop;
1063 for (aloop = loop->inner; aloop; aloop = aloop->next)
1065 cloop = duplicate_loop (aloop, target);
1066 duplicate_subloops (aloop, cloop);
1070 /* Copies structure of subloops of N loops, stored in array COPIED_LOOPS,
1071 into TARGET loop, placing newly created loops into loop tree. */
1072 static void
1073 copy_loops_to (struct loop **copied_loops, int n, struct loop *target)
1075 struct loop *aloop;
1076 int i;
1078 for (i = 0; i < n; i++)
1080 aloop = duplicate_loop (copied_loops[i], target);
1081 duplicate_subloops (copied_loops[i], aloop);
1085 /* Redirects edge E to basic block DEST. */
1086 static void
1087 loop_redirect_edge (edge e, basic_block dest)
1089 if (e->dest == dest)
1090 return;
1092 redirect_edge_and_branch_force (e, dest);
1095 /* Check whether LOOP's body can be duplicated. */
1096 bool
1097 can_duplicate_loop_p (const struct loop *loop)
1099 int ret;
1100 basic_block *bbs = get_loop_body (loop);
1102 ret = can_copy_bbs_p (bbs, loop->num_nodes);
1103 free (bbs);
1105 return ret;
1108 /* Sets the probability and count of edge E to zero. The probability and count
1109 are redistributed evenly among the remaining edges coming from E->src. */
1111 static void
1112 set_zero_probability (edge e)
1114 basic_block bb = e->src;
1115 edge_iterator ei;
1116 edge ae, last = NULL;
1117 unsigned n = EDGE_COUNT (bb->succs);
1118 gcov_type cnt = e->count, cnt1;
1119 unsigned prob = e->probability, prob1;
1121 gcc_assert (n > 1);
1122 cnt1 = cnt / (n - 1);
1123 prob1 = prob / (n - 1);
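/* Distribute E's probability and count evenly among the other successors
   of BB; the rounding remainder is added to the last one below.  */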
1125 FOR_EACH_EDGE (ae, ei, bb->succs)
1127 if (ae == e)
1128 continue;
1130 ae->probability += prob1;
1131 ae->count += cnt1;
1132 last = ae;
1135 /* Move the rest to one of the edges. */
1136 last->probability += prob % (n - 1);
1137 last->count += cnt % (n - 1);
1139 e->probability = 0;
1140 e->count = 0;
1143 /* Duplicates the body of LOOP to the given edge E NDUPL times. Takes care of
1144 updating the loop structure and dominators. E's destination must be the LOOP
1145 header for this to work, i.e. it must be the entry or latch edge of this loop;
1146 these are unique, as the loops must have preheaders for this function to work
1147 correctly (if E is the latch, the function unrolls the loop; if E is the entry
1148 edge, it peels the loop). Stores edges created by copying the ORIG edge from
1149 copies corresponding to set bits in the WONT_EXIT bitmap (bit 0 corresponds to
1150 the original LOOP body, the other copies are numbered in the order given by
1151 control flow through them) into the TO_REMOVE array. Returns false if
1152 duplication is impossible. */
1154 bool
1155 duplicate_loop_to_header_edge (struct loop *loop, edge e,
1156 unsigned int ndupl, sbitmap wont_exit,
1157 edge orig, vec<edge> *to_remove,
1158 int flags)
1160 struct loop *target, *aloop;
1161 struct loop **orig_loops;
1162 unsigned n_orig_loops;
1163 basic_block header = loop->header, latch = loop->latch;
1164 basic_block *new_bbs, *bbs, *first_active;
1165 basic_block new_bb, bb, first_active_latch = NULL;
1166 edge ae, latch_edge;
1167 edge spec_edges[2], new_spec_edges[2];
1168 #define SE_LATCH 0
1169 #define SE_ORIG 1
1170 unsigned i, j, n;
1171 int is_latch = (latch == e->src);
1172 int scale_act = 0, *scale_step = NULL, scale_main = 0;
1173 int scale_after_exit = 0;
1174 int p, freq_in, freq_le, freq_out_orig;
1175 int prob_pass_thru, prob_pass_wont_exit, prob_pass_main;
1176 int add_irreducible_flag;
1177 basic_block place_after;
1178 bitmap bbs_to_scale = NULL;
1179 bitmap_iterator bi;
1181 gcc_assert (e->dest == loop->header);
1182 gcc_assert (ndupl > 0);
1184 if (orig)
1186 /* Orig must be edge out of the loop. */
1187 gcc_assert (flow_bb_inside_loop_p (loop, orig->src));
1188 gcc_assert (!flow_bb_inside_loop_p (loop, orig->dest));
1191 n = loop->num_nodes;
1192 bbs = get_loop_body_in_dom_order (loop);
1193 gcc_assert (bbs[0] == loop->header);
1194 gcc_assert (bbs[n - 1] == loop->latch);
1196 /* Check whether duplication is possible. */
1197 if (!can_copy_bbs_p (bbs, loop->num_nodes))
1199 free (bbs);
1200 return false;
1202 new_bbs = XNEWVEC (basic_block, loop->num_nodes);
1204 /* In case we are doing loop peeling and the loop is in the middle of an
1205 irreducible region, the peeled copies will be inside it too. */
1206 add_irreducible_flag = e->flags & EDGE_IRREDUCIBLE_LOOP;
1207 gcc_assert (!is_latch || !add_irreducible_flag);
1209 /* Find edge from latch. */
1210 latch_edge = loop_latch_edge (loop);
1212 if (flags & DLTHE_FLAG_UPDATE_FREQ)
1214 /* Calculate the coefficients by which we have to scale the frequencies
1215 of the duplicated loop bodies. */
1216 freq_in = header->frequency;
1217 freq_le = EDGE_FREQUENCY (latch_edge);
1218 if (freq_in == 0)
1219 freq_in = 1;
1220 if (freq_in < freq_le)
1221 freq_in = freq_le;
1222 freq_out_orig = orig ? EDGE_FREQUENCY (orig) : freq_in - freq_le;
1223 if (freq_out_orig > freq_in - freq_le)
1224 freq_out_orig = freq_in - freq_le;
1225 prob_pass_thru = RDIV (REG_BR_PROB_BASE * freq_le, freq_in);
1226 prob_pass_wont_exit =
1227 RDIV (REG_BR_PROB_BASE * (freq_le + freq_out_orig), freq_in);
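/* PROB_PASS_THRU is the per-iteration probability of reaching the latch;
   PROB_PASS_WONT_EXIT additionally counts the frequency of the exit edge
   ORIG, for copies in which that exit is known not to be taken.  */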
1229 if (orig
1230 && REG_BR_PROB_BASE - orig->probability != 0)
1232 /* The blocks that are dominated by a removed exit edge ORIG have
1233 frequencies scaled by this. */
1234 scale_after_exit
1235 = GCOV_COMPUTE_SCALE (REG_BR_PROB_BASE,
1236 REG_BR_PROB_BASE - orig->probability);
1237 bbs_to_scale = BITMAP_ALLOC (NULL);
1238 for (i = 0; i < n; i++)
1240 if (bbs[i] != orig->src
1241 && dominated_by_p (CDI_DOMINATORS, bbs[i], orig->src))
1242 bitmap_set_bit (bbs_to_scale, i);
1246 scale_step = XNEWVEC (int, ndupl);
1248 for (i = 1; i <= ndupl; i++)
1249 scale_step[i - 1] = bitmap_bit_p (wont_exit, i)
1250 ? prob_pass_wont_exit
1251 : prob_pass_thru;
1253 /* Complete peeling is special as the probability of exit in last
1254 copy becomes 1. */
1255 if (flags & DLTHE_FLAG_COMPLETTE_PEEL)
1257 int wanted_freq = EDGE_FREQUENCY (e);
1259 if (wanted_freq > freq_in)
1260 wanted_freq = freq_in;
1262 gcc_assert (!is_latch);
1263 /* The first copy has the frequency of the incoming edge. Each subsequent
1264 frequency should be reduced by prob_pass_wont_exit. The caller
1265 should have managed the flags so that all copies except for the original loop
1266 have wont_exit set. */
1267 scale_act = GCOV_COMPUTE_SCALE (wanted_freq, freq_in);
1268 /* Now simulate the duplication adjustments and compute header
1269 frequency of the last copy. */
1270 for (i = 0; i < ndupl; i++)
1271 wanted_freq = combine_probabilities (wanted_freq, scale_step[i]);
1272 scale_main = GCOV_COMPUTE_SCALE (wanted_freq, freq_in);
1274 else if (is_latch)
1276 prob_pass_main = bitmap_bit_p (wont_exit, 0)
1277 ? prob_pass_wont_exit
1278 : prob_pass_thru;
1279 p = prob_pass_main;
1280 scale_main = REG_BR_PROB_BASE;
1281 for (i = 0; i < ndupl; i++)
1283 scale_main += p;
1284 p = combine_probabilities (p, scale_step[i]);
1286 scale_main = GCOV_COMPUTE_SCALE (REG_BR_PROB_BASE, scale_main);
1287 scale_act = combine_probabilities (scale_main, prob_pass_main);
1289 else
1291 scale_main = REG_BR_PROB_BASE;
1292 for (i = 0; i < ndupl; i++)
1293 scale_main = combine_probabilities (scale_main, scale_step[i]);
1294 scale_act = REG_BR_PROB_BASE - prob_pass_thru;
1296 for (i = 0; i < ndupl; i++)
1297 gcc_assert (scale_step[i] >= 0 && scale_step[i] <= REG_BR_PROB_BASE);
1298 gcc_assert (scale_main >= 0 && scale_main <= REG_BR_PROB_BASE
1299 && scale_act >= 0 && scale_act <= REG_BR_PROB_BASE);
1302 /* Loop the new bbs will belong to. */
1303 target = e->src->loop_father;
1305 /* Original loops. */
1306 n_orig_loops = 0;
1307 for (aloop = loop->inner; aloop; aloop = aloop->next)
1308 n_orig_loops++;
1309 orig_loops = XNEWVEC (struct loop *, n_orig_loops);
1310 for (aloop = loop->inner, i = 0; aloop; aloop = aloop->next, i++)
1311 orig_loops[i] = aloop;
1313 set_loop_copy (loop, target);
1315 first_active = XNEWVEC (basic_block, n);
1316 if (is_latch)
1318 memcpy (first_active, bbs, n * sizeof (basic_block));
1319 first_active_latch = latch;
1322 spec_edges[SE_ORIG] = orig;
1323 spec_edges[SE_LATCH] = latch_edge;
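/* SPEC_EDGES is passed to copy_bbs so that we get back, for each duplicate,
   the copies of the latch edge and of the ORIG exit edge.  */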
1325 place_after = e->src;
1326 for (j = 0; j < ndupl; j++)
1328 /* Copy loops. */
1329 copy_loops_to (orig_loops, n_orig_loops, target);
1331 /* Copy bbs. */
1332 copy_bbs (bbs, n, new_bbs, spec_edges, 2, new_spec_edges, loop,
1333 place_after, true);
1334 place_after = new_spec_edges[SE_LATCH]->src;
1336 if (flags & DLTHE_RECORD_COPY_NUMBER)
1337 for (i = 0; i < n; i++)
1339 gcc_assert (!new_bbs[i]->aux);
1340 new_bbs[i]->aux = (void *)(size_t)(j + 1);
1343 /* Note whether the blocks and edges belong to an irreducible loop. */
1344 if (add_irreducible_flag)
1346 for (i = 0; i < n; i++)
1347 new_bbs[i]->flags |= BB_DUPLICATED;
1348 for (i = 0; i < n; i++)
1350 edge_iterator ei;
1351 new_bb = new_bbs[i];
1352 if (new_bb->loop_father == target)
1353 new_bb->flags |= BB_IRREDUCIBLE_LOOP;
1355 FOR_EACH_EDGE (ae, ei, new_bb->succs)
1356 if ((ae->dest->flags & BB_DUPLICATED)
1357 && (ae->src->loop_father == target
1358 || ae->dest->loop_father == target))
1359 ae->flags |= EDGE_IRREDUCIBLE_LOOP;
1361 for (i = 0; i < n; i++)
1362 new_bbs[i]->flags &= ~BB_DUPLICATED;
1365 /* Redirect the special edges. */
1366 if (is_latch)
1368 redirect_edge_and_branch_force (latch_edge, new_bbs[0]);
1369 redirect_edge_and_branch_force (new_spec_edges[SE_LATCH],
1370 loop->header);
1371 set_immediate_dominator (CDI_DOMINATORS, new_bbs[0], latch);
1372 latch = loop->latch = new_bbs[n - 1];
1373 e = latch_edge = new_spec_edges[SE_LATCH];
1375 else
1377 redirect_edge_and_branch_force (new_spec_edges[SE_LATCH],
1378 loop->header);
1379 redirect_edge_and_branch_force (e, new_bbs[0]);
1380 set_immediate_dominator (CDI_DOMINATORS, new_bbs[0], e->src);
1381 e = new_spec_edges[SE_LATCH];
1384 /* Record exit edge in this copy. */
1385 if (orig && bitmap_bit_p (wont_exit, j + 1))
1387 if (to_remove)
1388 to_remove->safe_push (new_spec_edges[SE_ORIG]);
1389 set_zero_probability (new_spec_edges[SE_ORIG]);
1391 /* Scale the frequencies of the blocks dominated by the exit. */
1392 if (bbs_to_scale)
1394 EXECUTE_IF_SET_IN_BITMAP (bbs_to_scale, 0, i, bi)
1396 scale_bbs_frequencies_int (new_bbs + i, 1, scale_after_exit,
1397 REG_BR_PROB_BASE);
1402 /* Record the first copy in the control flow order if it is not
1403 the original loop (i.e. in case of peeling). */
1404 if (!first_active_latch)
1406 memcpy (first_active, new_bbs, n * sizeof (basic_block));
1407 first_active_latch = new_bbs[n - 1];
1410 /* Set counts and frequencies. */
1411 if (flags & DLTHE_FLAG_UPDATE_FREQ)
1413 scale_bbs_frequencies_int (new_bbs, n, scale_act, REG_BR_PROB_BASE);
1414 scale_act = combine_probabilities (scale_act, scale_step[j]);
1417 free (new_bbs);
1418 free (orig_loops);
1420 /* Record the exit edge in the original loop body, and update the frequencies. */
1421 if (orig && bitmap_bit_p (wont_exit, 0))
1423 if (to_remove)
1424 to_remove->safe_push (orig);
1425 set_zero_probability (orig);
1427 /* Scale the frequencies of the blocks dominated by the exit. */
1428 if (bbs_to_scale)
1430 EXECUTE_IF_SET_IN_BITMAP (bbs_to_scale, 0, i, bi)
1432 scale_bbs_frequencies_int (bbs + i, 1, scale_after_exit,
1433 REG_BR_PROB_BASE);
1438 /* Update the original loop. */
1439 if (!is_latch)
1440 set_immediate_dominator (CDI_DOMINATORS, e->dest, e->src);
1441 if (flags & DLTHE_FLAG_UPDATE_FREQ)
1443 scale_bbs_frequencies_int (bbs, n, scale_main, REG_BR_PROB_BASE);
1444 free (scale_step);
1447 /* Update dominators of outer blocks if affected. */
1448 for (i = 0; i < n; i++)
1450 basic_block dominated, dom_bb;
1451 vec<basic_block> dom_bbs;
1452 unsigned j;
1454 bb = bbs[i];
1455 bb->aux = 0;
1457 dom_bbs = get_dominated_by (CDI_DOMINATORS, bb);
1458 FOR_EACH_VEC_ELT (dom_bbs, j, dominated)
1460 if (flow_bb_inside_loop_p (loop, dominated))
1461 continue;
1462 dom_bb = nearest_common_dominator (
1463 CDI_DOMINATORS, first_active[i], first_active_latch);
1464 set_immediate_dominator (CDI_DOMINATORS, dominated, dom_bb);
1466 dom_bbs.release ();
1468 free (first_active);
1470 free (bbs);
1471 BITMAP_FREE (bbs_to_scale);
1473 return true;
1476 /* A callback for make_forwarder_block, to redirect all edges except for
1477 MFB_KJ_EDGE to the entry part. E is the edge for which we should decide
1478 whether to redirect it. */
1480 edge mfb_kj_edge;
1481 bool
1482 mfb_keep_just (edge e)
1484 return e != mfb_kj_edge;
1487 /* True when a candidate preheader BLOCK has predecessors from LOOP. */
1489 static bool
1490 has_preds_from_loop (basic_block block, struct loop *loop)
1492 edge e;
1493 edge_iterator ei;
1495 FOR_EACH_EDGE (e, ei, block->preds)
1496 if (e->src->loop_father == loop)
1497 return true;
1498 return false;
1501 /* Creates a pre-header for a LOOP. Returns the newly created block. Unless
1502 CP_SIMPLE_PREHEADERS is set in FLAGS, we only force LOOP to have a single
1503 entry; otherwise we also force the preheader block to have only one successor.
1504 When CP_FALLTHRU_PREHEADERS is set in FLAGS, we force the preheader block
1505 to be a fallthru predecessor to the loop header and to have only
1506 predecessors from outside of the loop.
1507 The function also updates dominators. */
1509 basic_block
1510 create_preheader (struct loop *loop, int flags)
1512 edge e, fallthru;
1513 basic_block dummy;
1514 int nentry = 0;
1515 bool irred = false;
1516 bool latch_edge_was_fallthru;
1517 edge one_succ_pred = NULL, single_entry = NULL;
1518 edge_iterator ei;
1520 FOR_EACH_EDGE (e, ei, loop->header->preds)
1522 if (e->src == loop->latch)
1523 continue;
1524 irred |= (e->flags & EDGE_IRREDUCIBLE_LOOP) != 0;
1525 nentry++;
1526 single_entry = e;
1527 if (single_succ_p (e->src))
1528 one_succ_pred = e;
1530 gcc_assert (nentry);
1531 if (nentry == 1)
1533 bool need_forwarder_block = false;
1535 /* We do not allow entry block to be the loop preheader, since we
1536 cannot emit code there. */
1537 if (single_entry->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1538 need_forwarder_block = true;
1539 else
1541 /* If we want simple preheaders, also force the preheader to have
1542 just a single successor. */
1543 if ((flags & CP_SIMPLE_PREHEADERS)
1544 && !single_succ_p (single_entry->src))
1545 need_forwarder_block = true;
1546 /* If we want fallthru preheaders, also create forwarder block when
1547 preheader ends with a jump or has predecessors from loop. */
1548 else if ((flags & CP_FALLTHRU_PREHEADERS)
1549 && (JUMP_P (BB_END (single_entry->src))
1550 || has_preds_from_loop (single_entry->src, loop)))
1551 need_forwarder_block = true;
1553 if (! need_forwarder_block)
1554 return NULL;
1557 mfb_kj_edge = loop_latch_edge (loop);
1558 latch_edge_was_fallthru = (mfb_kj_edge->flags & EDGE_FALLTHRU) != 0;
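/* make_forwarder_block splits the header: every entry edge except the latch
   edge (MFB_KJ_EDGE) is redirected to a new block, which becomes the
   preheader; the old header keeps only the latch edge.  */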
1559 fallthru = make_forwarder_block (loop->header, mfb_keep_just, NULL);
1560 dummy = fallthru->src;
1561 loop->header = fallthru->dest;
1563 /* Try to be clever in placing the newly created preheader. The idea is to
1564 avoid breaking any "fallthruness" relationship between blocks.
1566 The preheader was created just before the header and all incoming edges
1567 to the header were redirected to the preheader, except the latch edge.
1568 So the only problematic case is when this latch edge was a fallthru
1569 edge: it no longer is after the preheader creation, so we have broken
1570 the fallthruness. We're therefore going to look for a better place. */
1571 if (latch_edge_was_fallthru)
1573 if (one_succ_pred)
1574 e = one_succ_pred;
1575 else
1576 e = EDGE_PRED (dummy, 0);
1578 move_block_after (dummy, e->src);
1581 if (irred)
1583 dummy->flags |= BB_IRREDUCIBLE_LOOP;
1584 single_succ_edge (dummy)->flags |= EDGE_IRREDUCIBLE_LOOP;
1587 if (dump_file)
1588 fprintf (dump_file, "Created preheader block for loop %i\n",
1589 loop->num);
1591 if (flags & CP_FALLTHRU_PREHEADERS)
1592 gcc_assert ((single_succ_edge (dummy)->flags & EDGE_FALLTHRU)
1593 && !JUMP_P (BB_END (dummy)));
1595 return dummy;
1598 /* Create preheaders for each loop; for meaning of FLAGS see create_preheader. */
1600 void
1601 create_preheaders (int flags)
1603 struct loop *loop;
1605 if (!current_loops)
1606 return;
1608 FOR_EACH_LOOP (loop, 0)
1609 create_preheader (loop, flags);
1610 loops_state_set (LOOPS_HAVE_PREHEADERS);
1613 /* Forces all loop latches to have only a single successor. */
1615 void
1616 force_single_succ_latches (void)
1618 struct loop *loop;
1619 edge e;
1621 FOR_EACH_LOOP (loop, 0)
1623 if (loop->latch != loop->header && single_succ_p (loop->latch))
1624 continue;
1626 e = find_edge (loop->latch, loop->header);
1627 gcc_checking_assert (e != NULL);
1629 split_edge (e);
1631 loops_state_set (LOOPS_HAVE_SIMPLE_LATCHES);
1634 /* This function is called from loop_version. It splits the entry edge
1635 of the loop we want to version, adds the versioning condition, and
1636 adjusts the edges to the two versions of the loop appropriately.
1637 E is the incoming edge. Returns the basic block containing the
1638 condition.
1640 --- edge e ---- > [second_head]
1642 Split it and insert new conditional expression and adjust edges.
1644 --- edge e ---> [cond expr] ---> [first_head]
1646 +---------> [second_head]
1648 THEN_PROB is the probability of then branch of the condition. */
1650 static basic_block
1651 lv_adjust_loop_entry_edge (basic_block first_head, basic_block second_head,
1652 edge e, void *cond_expr, unsigned then_prob)
1654 basic_block new_head = NULL;
1655 edge e1;
1657 gcc_assert (e->dest == second_head);
1659 /* Split edge 'e'. This will create a new basic block, where we can
1660 insert conditional expr. */
1661 new_head = split_edge (e);
1663 lv_add_condition_to_bb (first_head, second_head, new_head,
1664 cond_expr);
1666 /* Don't set EDGE_TRUE_VALUE in RTL mode, as it's invalid there. */
1667 e = single_succ_edge (new_head);
1668 e1 = make_edge (new_head, first_head,
1669 current_ir_type () == IR_GIMPLE ? EDGE_TRUE_VALUE : 0);
1670 e1->probability = then_prob;
1671 e->probability = REG_BR_PROB_BASE - then_prob;
1672 e1->count = apply_probability (e->count, e1->probability);
1673 e->count = apply_probability (e->count, e->probability);
1675 set_immediate_dominator (CDI_DOMINATORS, first_head, new_head);
1676 set_immediate_dominator (CDI_DOMINATORS, second_head, new_head);
1678 /* Adjust loop header phi nodes. */
1679 lv_adjust_loop_header_phi (first_head, second_head, new_head, e1);
1681 return new_head;
1684 /* Main entry point for the Loop Versioning transformation.
1686 Given a condition and a loop, this transformation creates
1687 if (condition) { loop_copy1 } else { loop_copy2 },
1688 where loop_copy1 is the loop transformed in one way, and loop_copy2
1689 is the loop transformed in another way (or unchanged). 'condition'
1690 may be a run time test for things that were not resolved by static
1691 analysis (overlapping ranges (anti-aliasing), alignment, etc.).
1693 THEN_PROB is the probability of the then edge of the if. THEN_SCALE
1694 is the ratio by which the frequencies in the original loop should
1695 be scaled. ELSE_SCALE is the ratio by which the frequencies in the
1696 new loop should be scaled.
1698 If PLACE_AFTER is true, we place the new loop after LOOP in the
1699 instruction stream, otherwise it is placed before LOOP. */
1701 struct loop *
1702 loop_version (struct loop *loop,
1703 void *cond_expr, basic_block *condition_bb,
1704 unsigned then_prob, unsigned then_scale, unsigned else_scale,
1705 bool place_after)
1707 basic_block first_head, second_head;
1708 edge entry, latch_edge, true_edge, false_edge;
1709 int irred_flag;
1710 struct loop *nloop;
1711 basic_block cond_bb;
1713 /* Record entry and latch edges for the loop */
1714 entry = loop_preheader_edge (loop);
1715 irred_flag = entry->flags & EDGE_IRREDUCIBLE_LOOP;
1716 entry->flags &= ~EDGE_IRREDUCIBLE_LOOP;
1718 /* Note down head of loop as first_head. */
1719 first_head = entry->dest;
1721 /* Duplicate loop. */
1722 if (!cfg_hook_duplicate_loop_to_header_edge (loop, entry, 1,
1723 NULL, NULL, NULL, 0))
1725 entry->flags |= irred_flag;
1726 return NULL;
1729 /* After duplication entry edge now points to new loop head block.
1730 Note down new head as second_head. */
1731 second_head = entry->dest;
1733 /* Split loop entry edge and insert new block with cond expr. */
1734 cond_bb = lv_adjust_loop_entry_edge (first_head, second_head,
1735 entry, cond_expr, then_prob);
1736 if (condition_bb)
1737 *condition_bb = cond_bb;
1739 if (!cond_bb)
1741 entry->flags |= irred_flag;
1742 return NULL;
1745 latch_edge = single_succ_edge (get_bb_copy (loop->latch));
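/* LATCH_EDGE is the single successor edge of the copied latch; loopify below
   turns the duplicated body into the new loop NLOOP using it.  */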
1747 extract_cond_bb_edges (cond_bb, &true_edge, &false_edge);
1748 nloop = loopify (latch_edge,
1749 single_pred_edge (get_bb_copy (loop->header)),
1750 cond_bb, true_edge, false_edge,
1751 false /* Do not redirect all edges. */,
1752 then_scale, else_scale);
1754 copy_loop_info (loop, nloop);
1756 /* loopify redirected latch_edge. Update its PENDING_STMTS. */
1757 lv_flush_pending_stmts (latch_edge);
1759 /* loopify redirected condition_bb's succ edge. Update its PENDING_STMTS. */
1760 extract_cond_bb_edges (cond_bb, &true_edge, &false_edge);
1761 lv_flush_pending_stmts (false_edge);
1762 /* Adjust irreducible flag. */
1763 if (irred_flag)
1765 cond_bb->flags |= BB_IRREDUCIBLE_LOOP;
1766 loop_preheader_edge (loop)->flags |= EDGE_IRREDUCIBLE_LOOP;
1767 loop_preheader_edge (nloop)->flags |= EDGE_IRREDUCIBLE_LOOP;
1768 single_pred_edge (cond_bb)->flags |= EDGE_IRREDUCIBLE_LOOP;
1771 if (place_after)
1773 basic_block *bbs = get_loop_body_in_dom_order (nloop), after;
1774 unsigned i;
1776 after = loop->latch;
1778 for (i = 0; i < nloop->num_nodes; i++)
1780 move_block_after (bbs[i], after);
1781 after = bbs[i];
1783 free (bbs);
1786 /* At this point condition_bb is loop preheader with two successors,
1787 first_head and second_head. Make sure that loop preheader has only
1788 one successor. */
1789 split_edge (loop_preheader_edge (loop));
1790 split_edge (loop_preheader_edge (nloop));
1792 return nloop;