1 /* Loop manipulation code for GNU compiler.
2 Copyright (C) 2002-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "predict.h"
26 #include "symtab.h"
27 #include "hard-reg-set.h"
28 #include "input.h"
29 #include "function.h"
30 #include "dominance.h"
31 #include "cfg.h"
32 #include "cfganal.h"
33 #include "basic-block.h"
34 #include "cfgloop.h"
35 #include "tree.h"
36 #include "fold-const.h"
37 #include "tree-ssa-alias.h"
38 #include "internal-fn.h"
39 #include "gimple-expr.h"
40 #include "is-a.h"
41 #include "gimple.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "tree-ssa-loop-manip.h"
45 #include "dumpfile.h"
47 static void copy_loops_to (struct loop **, int,
48 struct loop *);
49 static void loop_redirect_edge (edge, basic_block);
50 static void remove_bbs (basic_block *, int);
51 static bool rpe_enum_p (const_basic_block, const void *);
52 static int find_path (edge, basic_block **);
53 static void fix_loop_placements (struct loop *, bool *);
54 static bool fix_bb_placement (basic_block);
55 static void fix_bb_placements (basic_block, bool *, bitmap);
57 /* Checks whether basic block BB is dominated by DATA. */
58 static bool
59 rpe_enum_p (const_basic_block bb, const void *data)
61 return dominated_by_p (CDI_DOMINATORS, bb, (const_basic_block) data);
64 /* Remove basic blocks BBS. NBBS is the number of the basic blocks. */
66 static void
67 remove_bbs (basic_block *bbs, int nbbs)
69 int i;
71 for (i = 0; i < nbbs; i++)
72 delete_basic_block (bbs[i]);
75 /* Find the path -- i.e. the basic blocks dominated by edge E -- and put them
76 into array BBS, which will be allocated large enough to contain them.
77 E->dest must have exactly one predecessor for this to work (this is
78 easy to achieve and we do not do it here because we do not want this
79 function to alter anything). The number of basic blocks in the
80 path is returned. */
81 static int
82 find_path (edge e, basic_block **bbs)
84 gcc_assert (EDGE_COUNT (e->dest->preds) <= 1);
86 /* Find bbs in the path. */
87 *bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
88 return dfs_enumerate_from (e->dest, 0, rpe_enum_p, *bbs,
89 n_basic_blocks_for_fn (cfun), e->dest);
92 /* Fix placement of basic block BB inside loop hierarchy --
93 Let L be the loop to which BB belongs. Then every successor of BB must either
94 1) belong to some superloop of loop L, or
95 2) be a header of a loop K such that K->outer is a superloop of L.
96 Returns true if we had to move BB into another loop to enforce this condition,
97 false if the placement of BB was already correct (provided that the placements
98 of its successors are correct). */
99 static bool
100 fix_bb_placement (basic_block bb)
102 edge e;
103 edge_iterator ei;
104 struct loop *loop = current_loops->tree_root, *act;
106 FOR_EACH_EDGE (e, ei, bb->succs)
108 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
109 continue;
111 act = e->dest->loop_father;
112 if (act->header == e->dest)
113 act = loop_outer (act);
115 if (flow_loop_nested_p (loop, act))
116 loop = act;
119 if (loop == bb->loop_father)
120 return false;
122 remove_bb_from_loops (bb);
123 add_bb_to_loop (bb, loop);
125 return true;
128 /* Fix placement of LOOP inside the loop tree, i.e. find the innermost superloop
129 of LOOP to which at least one exit edge of LOOP leads, and set it
130 as the immediate superloop of LOOP. Return true if the immediate superloop
131 of LOOP changed.
133 IRRED_INVALIDATED is set to true if a change in the loop structures might
134 invalidate the information about irreducible regions. */
136 static bool
137 fix_loop_placement (struct loop *loop, bool *irred_invalidated)
139 unsigned i;
140 edge e;
141 vec<edge> exits = get_loop_exit_edges (loop);
142 struct loop *father = current_loops->tree_root, *act;
143 bool ret = false;
145 FOR_EACH_VEC_ELT (exits, i, e)
147 act = find_common_loop (loop, e->dest->loop_father);
148 if (flow_loop_nested_p (father, act))
149 father = act;
152 if (father != loop_outer (loop))
154 for (act = loop_outer (loop); act != father; act = loop_outer (act))
155 act->num_nodes -= loop->num_nodes;
156 flow_loop_tree_node_remove (loop);
157 flow_loop_tree_node_add (father, loop);
159 /* The exit edges of LOOP no longer exit its original immediate
160 superloops; remove them from the appropriate exit lists. */
161 FOR_EACH_VEC_ELT (exits, i, e)
163 /* We may need to recompute irreducible loops. */
164 if (e->flags & EDGE_IRREDUCIBLE_LOOP)
165 *irred_invalidated = true;
166 rescan_loop_exit (e, false, false);
169 ret = true;
172 exits.release ();
173 return ret;
176 /* Fix placements of basic blocks inside the loop hierarchy stored in loops; i.e.
177 enforce the condition stated in the description of fix_bb_placement. We
178 start from basic block FROM that had some of its successors removed, so that
179 its placement no longer has to be correct, and iteratively fix the placement of
180 its predecessors, which may change if the placement of FROM changed. Also fix
181 the placement of subloops of FROM->loop_father, which might also be altered due
182 to this change; the condition for them is similar, except that instead of
183 successors we consider edges coming out of the loops.
185 If the changes may invalidate the information about irreducible regions,
186 IRRED_INVALIDATED is set to true.
188 If LOOP_CLOSED_SSA_INVALIDATED is non-NULL then all basic blocks with a
189 changed loop_father are collected there. */
191 static void
192 fix_bb_placements (basic_block from,
193 bool *irred_invalidated,
194 bitmap loop_closed_ssa_invalidated)
196 sbitmap in_queue;
197 basic_block *queue, *qtop, *qbeg, *qend;
198 struct loop *base_loop, *target_loop;
199 edge e;
201 /* We pass through blocks back-reachable from FROM, testing whether some
202 of their successors moved to an outer loop. It may be necessary to
203 iterate several times, but the process is finite, as we stop unless we move
204 the basic block up the loop structure. The whole story is a bit
205 more complicated due to the presence of subloops, which are moved using
206 fix_loop_placement. */
208 base_loop = from->loop_father;
209 /* If we are already in the outermost loop, the basic blocks cannot be moved
210 outside of it. If FROM is the header of the base loop, it cannot be moved
211 outside of it, either. In both cases, we can end now. */
212 if (base_loop == current_loops->tree_root
213 || from == base_loop->header)
214 return;
216 in_queue = sbitmap_alloc (last_basic_block_for_fn (cfun));
217 bitmap_clear (in_queue);
218 bitmap_set_bit (in_queue, from->index);
219 /* Prevent us from going out of the base_loop. */
220 bitmap_set_bit (in_queue, base_loop->header->index);
222 queue = XNEWVEC (basic_block, base_loop->num_nodes + 1);
223 qtop = queue + base_loop->num_nodes + 1;
224 qbeg = queue;
225 qend = queue + 1;
226 *qbeg = from;
228 while (qbeg != qend)
230 edge_iterator ei;
231 from = *qbeg;
232 qbeg++;
233 if (qbeg == qtop)
234 qbeg = queue;
235 bitmap_clear_bit (in_queue, from->index);
237 if (from->loop_father->header == from)
239 /* Subloop header, maybe move the loop upward. */
240 if (!fix_loop_placement (from->loop_father, irred_invalidated))
241 continue;
242 target_loop = loop_outer (from->loop_father);
243 if (loop_closed_ssa_invalidated)
245 basic_block *bbs = get_loop_body (from->loop_father);
246 for (unsigned i = 0; i < from->loop_father->num_nodes; ++i)
247 bitmap_set_bit (loop_closed_ssa_invalidated, bbs[i]->index);
248 free (bbs);
251 else
253 /* Ordinary basic block. */
254 if (!fix_bb_placement (from))
255 continue;
256 target_loop = from->loop_father;
257 if (loop_closed_ssa_invalidated)
258 bitmap_set_bit (loop_closed_ssa_invalidated, from->index);
261 FOR_EACH_EDGE (e, ei, from->succs)
263 if (e->flags & EDGE_IRREDUCIBLE_LOOP)
264 *irred_invalidated = true;
267 /* Something has changed, insert predecessors into queue. */
268 FOR_EACH_EDGE (e, ei, from->preds)
270 basic_block pred = e->src;
271 struct loop *nca;
273 if (e->flags & EDGE_IRREDUCIBLE_LOOP)
274 *irred_invalidated = true;
276 if (bitmap_bit_p (in_queue, pred->index))
277 continue;
279 /* If it is a subloop, then either it was not moved, or
280 the path up the loop tree from base_loop does not contain
281 it. */
282 nca = find_common_loop (pred->loop_father, base_loop);
283 if (pred->loop_father != base_loop
284 && (nca == base_loop
285 || nca != pred->loop_father))
286 pred = pred->loop_father->header;
287 else if (!flow_loop_nested_p (target_loop, pred->loop_father))
289 /* If PRED is already higher in the loop hierarchy than the
290 TARGET_LOOP to which we moved FROM, the change of the position
291 of FROM does not affect the position of PRED, so there is no
292 point in processing it. */
293 continue;
296 if (bitmap_bit_p (in_queue, pred->index))
297 continue;
299 /* Schedule the basic block. */
300 *qend = pred;
301 qend++;
302 if (qend == qtop)
303 qend = queue;
304 bitmap_set_bit (in_queue, pred->index);
307 free (in_queue);
308 free (queue);
311 /* Removes the path beginning at edge E, i.e. removes the basic blocks dominated by E
312 and updates loop structures and dominators. Returns true if we were able
313 to remove the path, false otherwise (in which case nothing is affected). */
314 bool
315 remove_path (edge e)
317 edge ae;
318 basic_block *rem_bbs, *bord_bbs, from, bb;
319 vec<basic_block> dom_bbs;
320 int i, nrem, n_bord_bbs;
321 sbitmap seen;
322 bool irred_invalidated = false;
323 edge_iterator ei;
324 struct loop *l, *f;
326 if (!can_remove_branch_p (e))
327 return false;
329 /* Keep track of whether we need to update information about irreducible
330 regions. This is the case if the removed area is a part of the
331 irreducible region, or if the set of basic blocks that belong to a loop
332 that is inside an irreducible region is changed, or if such a loop is
333 removed. */
334 if (e->flags & EDGE_IRREDUCIBLE_LOOP)
335 irred_invalidated = true;
337 /* We need to check whether basic blocks are dominated by the edge
338 e, but we only have basic block dominators. This is easy to
339 fix -- when e->dest has exactly one predecessor, this corresponds
340 to blocks dominated by e->dest, if not, split the edge. */
341 if (!single_pred_p (e->dest))
342 e = single_pred_edge (split_edge (e));
344 /* It may happen that by removing the path we remove one or more loops
345 that E->src belongs to. In this case, first unloop the loops, then proceed
346 normally. We may assume that e->dest is not a header of any loop,
347 as it now has exactly one predecessor. */
348 for (l = e->src->loop_father; loop_outer (l); l = f)
350 f = loop_outer (l);
351 if (dominated_by_p (CDI_DOMINATORS, l->latch, e->dest))
352 unloop (l, &irred_invalidated, NULL);
355 /* Identify the path. */
356 nrem = find_path (e, &rem_bbs);
358 n_bord_bbs = 0;
359 bord_bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
360 seen = sbitmap_alloc (last_basic_block_for_fn (cfun));
361 bitmap_clear (seen);
363 /* Find "border" blocks -- i.e. those with a predecessor in the removed path. */
364 for (i = 0; i < nrem; i++)
365 bitmap_set_bit (seen, rem_bbs[i]->index);
366 if (!irred_invalidated)
367 FOR_EACH_EDGE (ae, ei, e->src->succs)
368 if (ae != e && ae->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
369 && !bitmap_bit_p (seen, ae->dest->index)
370 && ae->flags & EDGE_IRREDUCIBLE_LOOP)
372 irred_invalidated = true;
373 break;
376 for (i = 0; i < nrem; i++)
378 bb = rem_bbs[i];
379 FOR_EACH_EDGE (ae, ei, rem_bbs[i]->succs)
380 if (ae->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
381 && !bitmap_bit_p (seen, ae->dest->index))
383 bitmap_set_bit (seen, ae->dest->index);
384 bord_bbs[n_bord_bbs++] = ae->dest;
386 if (ae->flags & EDGE_IRREDUCIBLE_LOOP)
387 irred_invalidated = true;
391 /* Remove the path. */
392 from = e->src;
393 remove_branch (e);
394 dom_bbs.create (0);
396 /* Cancel loops contained in the path. */
397 for (i = 0; i < nrem; i++)
398 if (rem_bbs[i]->loop_father->header == rem_bbs[i])
399 cancel_loop_tree (rem_bbs[i]->loop_father);
401 remove_bbs (rem_bbs, nrem);
402 free (rem_bbs);
404 /* Find blocks whose dominators may be affected. */
405 bitmap_clear (seen);
406 for (i = 0; i < n_bord_bbs; i++)
408 basic_block ldom;
410 bb = get_immediate_dominator (CDI_DOMINATORS, bord_bbs[i]);
411 if (bitmap_bit_p (seen, bb->index))
412 continue;
413 bitmap_set_bit (seen, bb->index);
415 for (ldom = first_dom_son (CDI_DOMINATORS, bb);
416 ldom;
417 ldom = next_dom_son (CDI_DOMINATORS, ldom))
418 if (!dominated_by_p (CDI_DOMINATORS, from, ldom))
419 dom_bbs.safe_push (ldom);
422 free (seen);
424 /* Recount dominators. */
425 iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, true);
426 dom_bbs.release ();
427 free (bord_bbs);
429 /* Fix placements of basic blocks inside loops and the placement of
430 loops in the loop tree. */
431 fix_bb_placements (from, &irred_invalidated, NULL);
432 fix_loop_placements (from->loop_father, &irred_invalidated);
434 if (irred_invalidated
435 && loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS))
436 mark_irreducible_loops ();
438 return true;
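
/* Illustrative usage sketch (editorial addition, not part of the original
   file): a pass that has proven that edge E is never taken can drop
   everything dominated by it.  remove_path itself re-checks
   can_remove_branch_p and returns false when the branch cannot be removed,
   so a hypothetical caller only needs to test the return value:

     if (remove_path (e))
       cfg_altered = true;

   On success the CFG, the dominators and the loop tree have already been
   fixed up; cfg_altered is a hypothetical flag of the caller.  */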
441 /* Creates a place for a new LOOP in the loop structure of FN. */
443 void
444 place_new_loop (struct function *fn, struct loop *loop)
446 loop->num = number_of_loops (fn);
447 vec_safe_push (loops_for_fn (fn)->larray, loop);
450 /* Given a LOOP structure with filled header and latch, find the body of the
451 corresponding loop and add it to the loop tree. Insert LOOP as a child of
452 OUTER. */
454 void
455 add_loop (struct loop *loop, struct loop *outer)
457 basic_block *bbs;
458 int i, n;
459 struct loop *subloop;
460 edge e;
461 edge_iterator ei;
463 /* Add it to loop structure. */
464 place_new_loop (cfun, loop);
465 flow_loop_tree_node_add (outer, loop);
467 /* Find its nodes. */
468 bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
469 n = get_loop_body_with_size (loop, bbs, n_basic_blocks_for_fn (cfun));
471 for (i = 0; i < n; i++)
473 if (bbs[i]->loop_father == outer)
475 remove_bb_from_loops (bbs[i]);
476 add_bb_to_loop (bbs[i], loop);
477 continue;
480 loop->num_nodes++;
482 /* If we find a direct subloop of OUTER, move it to LOOP. */
483 subloop = bbs[i]->loop_father;
484 if (loop_outer (subloop) == outer
485 && subloop->header == bbs[i])
487 flow_loop_tree_node_remove (subloop);
488 flow_loop_tree_node_add (loop, subloop);
492 /* Update the information about loop exit edges. */
493 for (i = 0; i < n; i++)
495 FOR_EACH_EDGE (e, ei, bbs[i]->succs)
497 rescan_loop_exit (e, false, false);
501 free (bbs);
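
/* Illustrative sketch (editorial addition): create_empty_loop_on_edge below
   shows the intended calling sequence -- allocate the loop, fill in its
   header and latch, then register it in the tree:

     struct loop *loop = alloc_loop ();
     loop->header = loop_header;
     loop->latch = loop_latch;
     add_loop (loop, outer);

   where loop_header, loop_latch and outer are blocks and a loop prepared by
   the caller.  */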
504 /* Multiply all frequencies in LOOP by NUM/DEN. */
506 void
507 scale_loop_frequencies (struct loop *loop, int num, int den)
509 basic_block *bbs;
511 bbs = get_loop_body (loop);
512 scale_bbs_frequencies_int (bbs, loop->num_nodes, num, den);
513 free (bbs);
516 /* Multiply all frequencies in LOOP by SCALE/REG_BR_PROB_BASE.
517 If ITERATION_BOUND is non-zero, scale even further if loop is predicted
518 to iterate too many times. */
520 void
521 scale_loop_profile (struct loop *loop, int scale, gcov_type iteration_bound)
523 gcov_type iterations = expected_loop_iterations_unbounded (loop);
524 edge e;
525 edge_iterator ei;
527 if (dump_file && (dump_flags & TDF_DETAILS))
528 fprintf (dump_file, ";; Scaling loop %i with scale %f, "
529 "bounding iterations to %i from guessed %i\n",
530 loop->num, (double)scale / REG_BR_PROB_BASE,
531 (int)iteration_bound, (int)iterations);
533 /* See if loop is predicted to iterate too many times. */
534 if (iteration_bound && iterations > 0
535 && apply_probability (iterations, scale) > iteration_bound)
537 /* Fixing the loop profile for a different trip count is not trivial; the exit
538 probabilities have to be updated to match and the frequencies propagated down
539 to the loop body.
541 We fully update only the simple case of a loop with a single exit that is
542 either from the latch or from the BB just before the latch, and leaves from a BB
543 with a simple conditional jump. This is OK for use in the vectorizer. */
544 e = single_exit (loop);
545 if (e)
547 edge other_e;
548 int freq_delta;
549 gcov_type count_delta;
551 FOR_EACH_EDGE (other_e, ei, e->src->succs)
552 if (!(other_e->flags & (EDGE_ABNORMAL | EDGE_FAKE))
553 && e != other_e)
554 break;
556 /* Probability of exit must be 1/iterations. */
557 freq_delta = EDGE_FREQUENCY (e);
558 e->probability = REG_BR_PROB_BASE / iteration_bound;
559 other_e->probability = inverse_probability (e->probability);
560 freq_delta -= EDGE_FREQUENCY (e);
562 /* Adjust counts accordingly. */
563 count_delta = e->count;
564 e->count = apply_probability (e->src->count, e->probability);
565 other_e->count = apply_probability (e->src->count, other_e->probability);
566 count_delta -= e->count;
568 /* If the latch exists, change its frequency and count, since we changed
569 the probability of the exit. Theoretically we should update everything from
570 the source of the exit edge to the latch, but for the vectorizer this is enough. */
571 if (loop->latch
572 && loop->latch != e->src)
574 loop->latch->frequency += freq_delta;
575 if (loop->latch->frequency < 0)
576 loop->latch->frequency = 0;
577 loop->latch->count += count_delta;
578 if (loop->latch->count < 0)
579 loop->latch->count = 0;
583 /* Roughly speaking we want to reduce the loop body profile by the
584 difference of loop iterations. We can, however, do better if
585 we look at the actual profile, if it is available. */
586 scale = RDIV (iteration_bound * scale, iterations);
587 if (loop->header->count)
589 gcov_type count_in = 0;
591 FOR_EACH_EDGE (e, ei, loop->header->preds)
592 if (e->src != loop->latch)
593 count_in += e->count;
595 if (count_in != 0)
596 scale = GCOV_COMPUTE_SCALE (count_in * iteration_bound,
597 loop->header->count);
599 else if (loop->header->frequency)
601 int freq_in = 0;
603 FOR_EACH_EDGE (e, ei, loop->header->preds)
604 if (e->src != loop->latch)
605 freq_in += EDGE_FREQUENCY (e);
607 if (freq_in != 0)
608 scale = GCOV_COMPUTE_SCALE (freq_in * iteration_bound,
609 loop->header->frequency);
611 if (!scale)
612 scale = 1;
615 if (scale == REG_BR_PROB_BASE)
616 return;
618 /* Scale the actual probabilities. */
619 scale_loop_frequencies (loop, scale, REG_BR_PROB_BASE);
620 if (dump_file && (dump_flags & TDF_DETAILS))
621 fprintf (dump_file, ";; guessed iterations are now %i\n",
622 (int)expected_loop_iterations_unbounded (loop));
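
/* Illustrative sketch (editorial addition): a transform that keeps roughly
   half of the loop's executions and knows the loop now runs at most
   NEW_BOUND iterations could fix the profile with

     scale_loop_profile (loop, REG_BR_PROB_BASE / 2, new_bound);

   new_bound is a hypothetical value computed by the caller; passing 0 for
   the bound only applies the scale, and passing REG_BR_PROB_BASE as the
   scale leaves the frequencies untouched unless the bound forces further
   scaling.  */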
625 /* Recompute dominance information for basic blocks outside LOOP. */
627 static void
628 update_dominators_in_loop (struct loop *loop)
630 vec<basic_block> dom_bbs = vNULL;
631 sbitmap seen;
632 basic_block *body;
633 unsigned i;
635 seen = sbitmap_alloc (last_basic_block_for_fn (cfun));
636 bitmap_clear (seen);
637 body = get_loop_body (loop);
639 for (i = 0; i < loop->num_nodes; i++)
640 bitmap_set_bit (seen, body[i]->index);
642 for (i = 0; i < loop->num_nodes; i++)
644 basic_block ldom;
646 for (ldom = first_dom_son (CDI_DOMINATORS, body[i]);
647 ldom;
648 ldom = next_dom_son (CDI_DOMINATORS, ldom))
649 if (!bitmap_bit_p (seen, ldom->index))
651 bitmap_set_bit (seen, ldom->index);
652 dom_bbs.safe_push (ldom);
656 iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, false);
657 free (body);
658 free (seen);
659 dom_bbs.release ();
662 /* Creates an if region as shown below. CONDITION is used to create
663 the test for the if.
666 | ------------- -------------
667 | | pred_bb | | pred_bb |
668 | ------------- -------------
669 | | |
670 | | | ENTRY_EDGE
671 | | ENTRY_EDGE V
672 | | ====> -------------
673 | | | cond_bb |
674 | | | CONDITION |
675 | | -------------
676 | V / \
677 | ------------- e_false / \ e_true
678 | | succ_bb | V V
679 | ------------- ----------- -----------
680 | | false_bb | | true_bb |
681 | ----------- -----------
682 | \ /
683 | \ /
684 | V V
685 | -------------
686 | | join_bb |
687 | -------------
688 | | exit_edge (result)
690 | -----------
691 | | succ_bb |
692 | -----------
696 edge
697 create_empty_if_region_on_edge (edge entry_edge, tree condition)
700 basic_block cond_bb, true_bb, false_bb, join_bb;
701 edge e_true, e_false, exit_edge;
702 gcond *cond_stmt;
703 tree simple_cond;
704 gimple_stmt_iterator gsi;
706 cond_bb = split_edge (entry_edge);
708 /* Insert condition in cond_bb. */
709 gsi = gsi_last_bb (cond_bb);
710 simple_cond =
711 force_gimple_operand_gsi (&gsi, condition, true, NULL,
712 false, GSI_NEW_STMT);
713 cond_stmt = gimple_build_cond_from_tree (simple_cond, NULL_TREE, NULL_TREE);
714 gsi = gsi_last_bb (cond_bb);
715 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
717 join_bb = split_edge (single_succ_edge (cond_bb));
719 e_true = single_succ_edge (cond_bb);
720 true_bb = split_edge (e_true);
722 e_false = make_edge (cond_bb, join_bb, 0);
723 false_bb = split_edge (e_false);
725 e_true->flags &= ~EDGE_FALLTHRU;
726 e_true->flags |= EDGE_TRUE_VALUE;
727 e_false->flags &= ~EDGE_FALLTHRU;
728 e_false->flags |= EDGE_FALSE_VALUE;
730 set_immediate_dominator (CDI_DOMINATORS, cond_bb, entry_edge->src);
731 set_immediate_dominator (CDI_DOMINATORS, true_bb, cond_bb);
732 set_immediate_dominator (CDI_DOMINATORS, false_bb, cond_bb);
733 set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
735 exit_edge = single_succ_edge (join_bb);
737 if (single_pred_p (exit_edge->dest))
738 set_immediate_dominator (CDI_DOMINATORS, exit_edge->dest, join_bb);
740 return exit_edge;
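
/* Illustrative sketch (editorial addition): a caller that wants to guard
   newly inserted code can build a GENERIC condition and let the function
   above materialize the diamond, e.g.

     tree cond = build2 (GT_EXPR, boolean_type_node, n, limit);
     edge exit = create_empty_if_region_on_edge (entry_edge, cond);

   n, limit and entry_edge are hypothetical values owned by the caller; the
   condition is gimplified in the new cond_bb, and code can then be inserted
   into the true and false blocks of the region.  */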
743 /* create_empty_loop_on_edge
745 | - pred_bb - ------ pred_bb ------
746 | | | | iv0 = initial_value |
747 | -----|----- ---------|-----------
748 | | ______ | entry_edge
749 | | entry_edge / | |
750 | | ====> | -V---V- loop_header -------------
751 | V | | iv_before = phi (iv0, iv_after) |
752 | - succ_bb - | ---|-----------------------------
753 | | | | |
754 | ----------- | ---V--- loop_body ---------------
755 | | | iv_after = iv_before + stride |
756 | | | if (iv_before < upper_bound) |
757 | | ---|--------------\--------------
758 | | | \ exit_e
759 | | V \
760 | | - loop_latch - V- succ_bb -
761 | | | | | |
762 | | /------------- -----------
763 | \ ___ /
765 Creates an empty loop as shown above. IV_BEFORE is the SSA_NAME
766 that is used before the increment of the IV. IV_BEFORE should be used for
767 adding code to the body that uses the IV. OUTER is the outer loop in
768 which the new loop should be inserted.
770 Both INITIAL_VALUE and UPPER_BOUND expressions are gimplified and
771 inserted on the loop entry edge. This implies that this function
772 should be used only when the UPPER_BOUND expression is a loop
773 invariant. */
775 struct loop *
776 create_empty_loop_on_edge (edge entry_edge,
777 tree initial_value,
778 tree stride, tree upper_bound,
779 tree iv,
780 tree *iv_before,
781 tree *iv_after,
782 struct loop *outer)
784 basic_block loop_header, loop_latch, succ_bb, pred_bb;
785 struct loop *loop;
786 gimple_stmt_iterator gsi;
787 gimple_seq stmts;
788 gcond *cond_expr;
789 tree exit_test;
790 edge exit_e;
791 int prob;
793 gcc_assert (entry_edge && initial_value && stride && upper_bound && iv);
795 /* Create header, latch and wire up the loop. */
796 pred_bb = entry_edge->src;
797 loop_header = split_edge (entry_edge);
798 loop_latch = split_edge (single_succ_edge (loop_header));
799 succ_bb = single_succ (loop_latch);
800 make_edge (loop_header, succ_bb, 0);
801 redirect_edge_succ_nodup (single_succ_edge (loop_latch), loop_header);
803 /* Set immediate dominator information. */
804 set_immediate_dominator (CDI_DOMINATORS, loop_header, pred_bb);
805 set_immediate_dominator (CDI_DOMINATORS, loop_latch, loop_header);
806 set_immediate_dominator (CDI_DOMINATORS, succ_bb, loop_header);
808 /* Initialize a loop structure and put it in a loop hierarchy. */
809 loop = alloc_loop ();
810 loop->header = loop_header;
811 loop->latch = loop_latch;
812 add_loop (loop, outer);
814 /* TODO: Fix frequencies and counts. */
815 prob = REG_BR_PROB_BASE / 2;
817 scale_loop_frequencies (loop, REG_BR_PROB_BASE - prob, REG_BR_PROB_BASE);
819 /* Update dominators. */
820 update_dominators_in_loop (loop);
822 /* Modify edge flags. */
823 exit_e = single_exit (loop);
824 exit_e->flags = EDGE_LOOP_EXIT | EDGE_FALSE_VALUE;
825 single_pred_edge (loop_latch)->flags = EDGE_TRUE_VALUE;
827 /* Construct IV code in loop. */
828 initial_value = force_gimple_operand (initial_value, &stmts, true, iv);
829 if (stmts)
831 gsi_insert_seq_on_edge (loop_preheader_edge (loop), stmts);
832 gsi_commit_edge_inserts ();
835 upper_bound = force_gimple_operand (upper_bound, &stmts, true, NULL);
836 if (stmts)
838 gsi_insert_seq_on_edge (loop_preheader_edge (loop), stmts);
839 gsi_commit_edge_inserts ();
842 gsi = gsi_last_bb (loop_header);
843 create_iv (initial_value, stride, iv, loop, &gsi, false,
844 iv_before, iv_after);
846 /* Insert loop exit condition. */
847 cond_expr = gimple_build_cond
848 (LT_EXPR, *iv_before, upper_bound, NULL_TREE, NULL_TREE);
850 exit_test = gimple_cond_lhs (cond_expr);
851 exit_test = force_gimple_operand_gsi (&gsi, exit_test, true, NULL,
852 false, GSI_NEW_STMT);
853 gimple_cond_set_lhs (cond_expr, exit_test);
854 gsi = gsi_last_bb (exit_e->src);
855 gsi_insert_after (&gsi, cond_expr, GSI_NEW_STMT);
857 split_block_after_labels (loop_header);
859 return loop;
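
/* Illustrative sketch (editorial addition): a caller materializing a counted
   loop that runs NITER times on edge E might do

     tree iv_before, iv_after;
     tree idx = create_tmp_var (sizetype, "idx");
     struct loop *l
       = create_empty_loop_on_edge (e, size_zero_node, size_one_node,
                                    niter, idx, &iv_before, &iv_after,
                                    outer);

   e, niter and outer are hypothetical values supplied by the caller;
   iv_before is then used when adding code to the loop body.  */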
862 /* Make area between HEADER_EDGE and LATCH_EDGE a loop by connecting
863 latch to header and update loop tree and dominators
864 accordingly. Everything between them plus LATCH_EDGE destination must
865 be dominated by HEADER_EDGE destination, and back-reachable from
866 LATCH_EDGE source. HEADER_EDGE is redirected to basic block SWITCH_BB,
867 FALSE_EDGE of SWITCH_BB to original destination of HEADER_EDGE and
868 TRUE_EDGE of SWITCH_BB to original destination of LATCH_EDGE.
869 Returns the newly created loop. Frequencies and counts in the new loop
870 are scaled by FALSE_SCALE and in the old one by TRUE_SCALE. */
872 struct loop *
873 loopify (edge latch_edge, edge header_edge,
874 basic_block switch_bb, edge true_edge, edge false_edge,
875 bool redirect_all_edges, unsigned true_scale, unsigned false_scale)
877 basic_block succ_bb = latch_edge->dest;
878 basic_block pred_bb = header_edge->src;
879 struct loop *loop = alloc_loop ();
880 struct loop *outer = loop_outer (succ_bb->loop_father);
881 int freq;
882 gcov_type cnt;
883 edge e;
884 edge_iterator ei;
886 loop->header = header_edge->dest;
887 loop->latch = latch_edge->src;
889 freq = EDGE_FREQUENCY (header_edge);
890 cnt = header_edge->count;
892 /* Redirect edges. */
893 loop_redirect_edge (latch_edge, loop->header);
894 loop_redirect_edge (true_edge, succ_bb);
896 /* During loop versioning, one of the switch_bb edges is already properly
897 set. Do not redirect it again unless REDIRECT_ALL_EDGES is true. */
898 if (redirect_all_edges)
900 loop_redirect_edge (header_edge, switch_bb);
901 loop_redirect_edge (false_edge, loop->header);
903 /* Update dominators. */
904 set_immediate_dominator (CDI_DOMINATORS, switch_bb, pred_bb);
905 set_immediate_dominator (CDI_DOMINATORS, loop->header, switch_bb);
908 set_immediate_dominator (CDI_DOMINATORS, succ_bb, switch_bb);
910 /* Compute new loop. */
911 add_loop (loop, outer);
913 /* Add switch_bb to appropriate loop. */
914 if (switch_bb->loop_father)
915 remove_bb_from_loops (switch_bb);
916 add_bb_to_loop (switch_bb, outer);
918 /* Fix frequencies. */
919 if (redirect_all_edges)
921 switch_bb->frequency = freq;
922 switch_bb->count = cnt;
923 FOR_EACH_EDGE (e, ei, switch_bb->succs)
925 e->count = apply_probability (switch_bb->count, e->probability);
928 scale_loop_frequencies (loop, false_scale, REG_BR_PROB_BASE);
929 scale_loop_frequencies (succ_bb->loop_father, true_scale, REG_BR_PROB_BASE);
930 update_dominators_in_loop (loop);
932 return loop;
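
/* Illustrative note (editorial addition): loop_version below is the
   canonical caller of loopify -- after duplicating the loop and building the
   block with the versioning condition it does

     nloop = loopify (latch_edge,
                      single_pred_edge (get_bb_copy (loop->header)),
                      cond_bb, true_edge, false_edge,
                      false, then_scale, else_scale);

   with false meaning "do not redirect all edges", since one switch_bb edge
   is already correct after the duplication.  */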
935 /* Remove the latch edge of a LOOP and update loops to indicate that
936 the LOOP was removed. After this function, the original loop latch will
937 have no successor, which the caller is expected to fix somehow.
939 If this may cause the information about irreducible regions to become
940 invalid, IRRED_INVALIDATED is set to true.
942 LOOP_CLOSED_SSA_INVALIDATED, if non-NULL, is a bitmap where we store
943 basic blocks that had a non-trivial update to their loop_father. */
945 void
946 unloop (struct loop *loop, bool *irred_invalidated,
947 bitmap loop_closed_ssa_invalidated)
949 basic_block *body;
950 struct loop *ploop;
951 unsigned i, n;
952 basic_block latch = loop->latch;
953 bool dummy = false;
955 if (loop_preheader_edge (loop)->flags & EDGE_IRREDUCIBLE_LOOP)
956 *irred_invalidated = true;
958 /* This is relatively straightforward. The dominators are unchanged, as
959 the loop header dominates the loop latch, so the only thing we have to care
960 about is the placement of loops and basic blocks inside the loop tree. We
961 move them all to the outer loop, and then let fix_bb_placements do
962 its work. */
964 body = get_loop_body (loop);
965 n = loop->num_nodes;
966 for (i = 0; i < n; i++)
967 if (body[i]->loop_father == loop)
969 remove_bb_from_loops (body[i]);
970 add_bb_to_loop (body[i], loop_outer (loop));
972 free (body);
974 while (loop->inner)
976 ploop = loop->inner;
977 flow_loop_tree_node_remove (ploop);
978 flow_loop_tree_node_add (loop_outer (loop), ploop);
981 /* Remove the loop and free its data. */
982 delete_loop (loop);
984 remove_edge (single_succ_edge (latch));
986 /* We do not pass IRRED_INVALIDATED to fix_bb_placements here, as even if
987 there is an irreducible region inside the cancelled loop, the flags will
988 be still correct. */
989 fix_bb_placements (latch, &dummy, loop_closed_ssa_invalidated);
992 /* Fix placement of superloops of LOOP inside the loop tree, i.e. ensure that
993 the condition stated in the description of fix_loop_placement holds for them.
994 It is used when we have removed some edges coming out of LOOP, which
995 may cause the correct placement of LOOP inside the loop tree to change.
997 IRRED_INVALIDATED is set to true if a change in the loop structures might
998 invalidate the information about irreducible regions. */
1000 static void
1001 fix_loop_placements (struct loop *loop, bool *irred_invalidated)
1003 struct loop *outer;
1005 while (loop_outer (loop))
1007 outer = loop_outer (loop);
1008 if (!fix_loop_placement (loop, irred_invalidated))
1009 break;
1011 /* Changing the placement of a loop in the loop tree may alter the
1012 validity of condition 2) of the description of fix_bb_placement
1013 for its preheader, because the successor is the header and belongs
1014 to the loop. So call fix_bb_placements to fix up the placement
1015 of the preheader and (possibly) of its predecessors. */
1016 fix_bb_placements (loop_preheader_edge (loop)->src,
1017 irred_invalidated, NULL);
1018 loop = outer;
1022 /* Duplicate loop bounds and other information we store about
1023 the loop into its duplicate. */
1025 void
1026 copy_loop_info (struct loop *loop, struct loop *target)
1028 gcc_checking_assert (!target->any_upper_bound && !target->any_estimate);
1029 target->any_upper_bound = loop->any_upper_bound;
1030 target->nb_iterations_upper_bound = loop->nb_iterations_upper_bound;
1031 target->any_estimate = loop->any_estimate;
1032 target->nb_iterations_estimate = loop->nb_iterations_estimate;
1033 target->estimate_state = loop->estimate_state;
1034 target->warned_aggressive_loop_optimizations
1035 |= loop->warned_aggressive_loop_optimizations;
1038 /* Creates a copy of LOOP as a subloop of the TARGET loop, placing the newly
1039 created loop into the loop structure. */
1040 struct loop *
1041 duplicate_loop (struct loop *loop, struct loop *target)
1043 struct loop *cloop;
1044 cloop = alloc_loop ();
1045 place_new_loop (cfun, cloop);
1047 copy_loop_info (loop, cloop);
1049 /* Mark the new loop as copy of LOOP. */
1050 set_loop_copy (loop, cloop);
1052 /* Add it to target. */
1053 flow_loop_tree_node_add (target, cloop);
1055 return cloop;
1058 /* Copies structure of subloops of LOOP into TARGET loop, placing
1059 newly created loops into loop tree. */
1060 void
1061 duplicate_subloops (struct loop *loop, struct loop *target)
1063 struct loop *aloop, *cloop;
1065 for (aloop = loop->inner; aloop; aloop = aloop->next)
1067 cloop = duplicate_loop (aloop, target);
1068 duplicate_subloops (aloop, cloop);
1072 /* Copies structure of subloops of N loops, stored in array COPIED_LOOPS,
1073 into TARGET loop, placing newly created loops into loop tree. */
1074 static void
1075 copy_loops_to (struct loop **copied_loops, int n, struct loop *target)
1077 struct loop *aloop;
1078 int i;
1080 for (i = 0; i < n; i++)
1082 aloop = duplicate_loop (copied_loops[i], target);
1083 duplicate_subloops (copied_loops[i], aloop);
1087 /* Redirects edge E to basic block DEST. */
1088 static void
1089 loop_redirect_edge (edge e, basic_block dest)
1091 if (e->dest == dest)
1092 return;
1094 redirect_edge_and_branch_force (e, dest);
1097 /* Check whether LOOP's body can be duplicated. */
1098 bool
1099 can_duplicate_loop_p (const struct loop *loop)
1101 int ret;
1102 basic_block *bbs = get_loop_body (loop);
1104 ret = can_copy_bbs_p (bbs, loop->num_nodes);
1105 free (bbs);
1107 return ret;
1110 /* Sets the probability and count of edge E to zero. The probability and count
1111 are redistributed evenly to the remaining edges coming from E->src. */
1113 static void
1114 set_zero_probability (edge e)
1116 basic_block bb = e->src;
1117 edge_iterator ei;
1118 edge ae, last = NULL;
1119 unsigned n = EDGE_COUNT (bb->succs);
1120 gcov_type cnt = e->count, cnt1;
1121 unsigned prob = e->probability, prob1;
1123 gcc_assert (n > 1);
1124 cnt1 = cnt / (n - 1);
1125 prob1 = prob / (n - 1);
1127 FOR_EACH_EDGE (ae, ei, bb->succs)
1129 if (ae == e)
1130 continue;
1132 ae->probability += prob1;
1133 ae->count += cnt1;
1134 last = ae;
1137 /* Move the rest to one of the edges. */
1138 last->probability += prob % (n - 1);
1139 last->count += cnt % (n - 1);
1141 e->probability = 0;
1142 e->count = 0;
1145 /* Duplicates the body of LOOP to the given edge E NDUPL times. Takes care of updating
1146 the loop structure and dominators. E's destination must be the LOOP header for
1147 this to work, i.e. it must be the entry or latch edge of this loop; these are
1148 unique, as the loops must have preheaders for this function to work
1149 correctly (if E is the latch edge, the function unrolls the loop; if E is the entry
1150 edge, it peels the loop). Store edges created by copying the ORIG edge from
1151 copies corresponding to set bits in WONT_EXIT bitmap (bit 0 corresponds to
1152 original LOOP body, the other copies are numbered in order given by control
1153 flow through them) into TO_REMOVE array. Returns false if duplication is
1154 impossible. */
1156 bool
1157 duplicate_loop_to_header_edge (struct loop *loop, edge e,
1158 unsigned int ndupl, sbitmap wont_exit,
1159 edge orig, vec<edge> *to_remove,
1160 int flags)
1162 struct loop *target, *aloop;
1163 struct loop **orig_loops;
1164 unsigned n_orig_loops;
1165 basic_block header = loop->header, latch = loop->latch;
1166 basic_block *new_bbs, *bbs, *first_active;
1167 basic_block new_bb, bb, first_active_latch = NULL;
1168 edge ae, latch_edge;
1169 edge spec_edges[2], new_spec_edges[2];
1170 #define SE_LATCH 0
1171 #define SE_ORIG 1
1172 unsigned i, j, n;
1173 int is_latch = (latch == e->src);
1174 int scale_act = 0, *scale_step = NULL, scale_main = 0;
1175 int scale_after_exit = 0;
1176 int p, freq_in, freq_le, freq_out_orig;
1177 int prob_pass_thru, prob_pass_wont_exit, prob_pass_main;
1178 int add_irreducible_flag;
1179 basic_block place_after;
1180 bitmap bbs_to_scale = NULL;
1181 bitmap_iterator bi;
1183 gcc_assert (e->dest == loop->header);
1184 gcc_assert (ndupl > 0);
1186 if (orig)
1188 /* Orig must be edge out of the loop. */
1189 gcc_assert (flow_bb_inside_loop_p (loop, orig->src));
1190 gcc_assert (!flow_bb_inside_loop_p (loop, orig->dest));
1193 n = loop->num_nodes;
1194 bbs = get_loop_body_in_dom_order (loop);
1195 gcc_assert (bbs[0] == loop->header);
1196 gcc_assert (bbs[n - 1] == loop->latch);
1198 /* Check whether duplication is possible. */
1199 if (!can_copy_bbs_p (bbs, loop->num_nodes))
1201 free (bbs);
1202 return false;
1204 new_bbs = XNEWVEC (basic_block, loop->num_nodes);
1206 /* In case we are doing loop peeling and the loop is in the middle of
1207 irreducible region, the peeled copies will be inside it too. */
1208 add_irreducible_flag = e->flags & EDGE_IRREDUCIBLE_LOOP;
1209 gcc_assert (!is_latch || !add_irreducible_flag);
1211 /* Find edge from latch. */
1212 latch_edge = loop_latch_edge (loop);
1214 if (flags & DLTHE_FLAG_UPDATE_FREQ)
1216 /* Calculate the coefficients by which we have to scale the frequencies
1217 of the duplicated loop bodies. */
1218 freq_in = header->frequency;
1219 freq_le = EDGE_FREQUENCY (latch_edge);
1220 if (freq_in == 0)
1221 freq_in = 1;
1222 if (freq_in < freq_le)
1223 freq_in = freq_le;
1224 freq_out_orig = orig ? EDGE_FREQUENCY (orig) : freq_in - freq_le;
1225 if (freq_out_orig > freq_in - freq_le)
1226 freq_out_orig = freq_in - freq_le;
1227 prob_pass_thru = RDIV (REG_BR_PROB_BASE * freq_le, freq_in);
1228 prob_pass_wont_exit =
1229 RDIV (REG_BR_PROB_BASE * (freq_le + freq_out_orig), freq_in);
1231 if (orig
1232 && REG_BR_PROB_BASE - orig->probability != 0)
1234 /* The blocks that are dominated by a removed exit edge ORIG have
1235 frequencies scaled by this. */
1236 scale_after_exit
1237 = GCOV_COMPUTE_SCALE (REG_BR_PROB_BASE,
1238 REG_BR_PROB_BASE - orig->probability);
1239 bbs_to_scale = BITMAP_ALLOC (NULL);
1240 for (i = 0; i < n; i++)
1242 if (bbs[i] != orig->src
1243 && dominated_by_p (CDI_DOMINATORS, bbs[i], orig->src))
1244 bitmap_set_bit (bbs_to_scale, i);
1248 scale_step = XNEWVEC (int, ndupl);
1250 for (i = 1; i <= ndupl; i++)
1251 scale_step[i - 1] = bitmap_bit_p (wont_exit, i)
1252 ? prob_pass_wont_exit
1253 : prob_pass_thru;
1255 /* Complete peeling is special as the probability of exit in last
1256 copy becomes 1. */
1257 if (flags & DLTHE_FLAG_COMPLETTE_PEEL)
1259 int wanted_freq = EDGE_FREQUENCY (e);
1261 if (wanted_freq > freq_in)
1262 wanted_freq = freq_in;
1264 gcc_assert (!is_latch);
1265 /* The first copy has the frequency of the incoming edge. Each subsequent
1266 frequency should be reduced by prob_pass_wont_exit. The caller
1267 should have managed the flags so that all copies except for the original loop
1268 have wont_exit set. */
1269 scale_act = GCOV_COMPUTE_SCALE (wanted_freq, freq_in);
1270 /* Now simulate the duplication adjustments and compute header
1271 frequency of the last copy. */
1272 for (i = 0; i < ndupl; i++)
1273 wanted_freq = combine_probabilities (wanted_freq, scale_step[i]);
1274 scale_main = GCOV_COMPUTE_SCALE (wanted_freq, freq_in);
1276 else if (is_latch)
1278 prob_pass_main = bitmap_bit_p (wont_exit, 0)
1279 ? prob_pass_wont_exit
1280 : prob_pass_thru;
1281 p = prob_pass_main;
1282 scale_main = REG_BR_PROB_BASE;
1283 for (i = 0; i < ndupl; i++)
1285 scale_main += p;
1286 p = combine_probabilities (p, scale_step[i]);
1288 scale_main = GCOV_COMPUTE_SCALE (REG_BR_PROB_BASE, scale_main);
1289 scale_act = combine_probabilities (scale_main, prob_pass_main);
1291 else
1293 scale_main = REG_BR_PROB_BASE;
1294 for (i = 0; i < ndupl; i++)
1295 scale_main = combine_probabilities (scale_main, scale_step[i]);
1296 scale_act = REG_BR_PROB_BASE - prob_pass_thru;
1298 for (i = 0; i < ndupl; i++)
1299 gcc_assert (scale_step[i] >= 0 && scale_step[i] <= REG_BR_PROB_BASE);
1300 gcc_assert (scale_main >= 0 && scale_main <= REG_BR_PROB_BASE
1301 && scale_act >= 0 && scale_act <= REG_BR_PROB_BASE);
1304 /* Loop the new bbs will belong to. */
1305 target = e->src->loop_father;
1307 /* Original loops. */
1308 n_orig_loops = 0;
1309 for (aloop = loop->inner; aloop; aloop = aloop->next)
1310 n_orig_loops++;
1311 orig_loops = XNEWVEC (struct loop *, n_orig_loops);
1312 for (aloop = loop->inner, i = 0; aloop; aloop = aloop->next, i++)
1313 orig_loops[i] = aloop;
1315 set_loop_copy (loop, target);
1317 first_active = XNEWVEC (basic_block, n);
1318 if (is_latch)
1320 memcpy (first_active, bbs, n * sizeof (basic_block));
1321 first_active_latch = latch;
1324 spec_edges[SE_ORIG] = orig;
1325 spec_edges[SE_LATCH] = latch_edge;
1327 place_after = e->src;
1328 for (j = 0; j < ndupl; j++)
1330 /* Copy loops. */
1331 copy_loops_to (orig_loops, n_orig_loops, target);
1333 /* Copy bbs. */
1334 copy_bbs (bbs, n, new_bbs, spec_edges, 2, new_spec_edges, loop,
1335 place_after, true);
1336 place_after = new_spec_edges[SE_LATCH]->src;
1338 if (flags & DLTHE_RECORD_COPY_NUMBER)
1339 for (i = 0; i < n; i++)
1341 gcc_assert (!new_bbs[i]->aux);
1342 new_bbs[i]->aux = (void *)(size_t)(j + 1);
1345 /* Note whether the blocks and edges belong to an irreducible loop. */
1346 if (add_irreducible_flag)
1348 for (i = 0; i < n; i++)
1349 new_bbs[i]->flags |= BB_DUPLICATED;
1350 for (i = 0; i < n; i++)
1352 edge_iterator ei;
1353 new_bb = new_bbs[i];
1354 if (new_bb->loop_father == target)
1355 new_bb->flags |= BB_IRREDUCIBLE_LOOP;
1357 FOR_EACH_EDGE (ae, ei, new_bb->succs)
1358 if ((ae->dest->flags & BB_DUPLICATED)
1359 && (ae->src->loop_father == target
1360 || ae->dest->loop_father == target))
1361 ae->flags |= EDGE_IRREDUCIBLE_LOOP;
1363 for (i = 0; i < n; i++)
1364 new_bbs[i]->flags &= ~BB_DUPLICATED;
1367 /* Redirect the special edges. */
1368 if (is_latch)
1370 redirect_edge_and_branch_force (latch_edge, new_bbs[0]);
1371 redirect_edge_and_branch_force (new_spec_edges[SE_LATCH],
1372 loop->header);
1373 set_immediate_dominator (CDI_DOMINATORS, new_bbs[0], latch);
1374 latch = loop->latch = new_bbs[n - 1];
1375 e = latch_edge = new_spec_edges[SE_LATCH];
1377 else
1379 redirect_edge_and_branch_force (new_spec_edges[SE_LATCH],
1380 loop->header);
1381 redirect_edge_and_branch_force (e, new_bbs[0]);
1382 set_immediate_dominator (CDI_DOMINATORS, new_bbs[0], e->src);
1383 e = new_spec_edges[SE_LATCH];
1386 /* Record exit edge in this copy. */
1387 if (orig && bitmap_bit_p (wont_exit, j + 1))
1389 if (to_remove)
1390 to_remove->safe_push (new_spec_edges[SE_ORIG]);
1391 set_zero_probability (new_spec_edges[SE_ORIG]);
1393 /* Scale the frequencies of the blocks dominated by the exit. */
1394 if (bbs_to_scale)
1396 EXECUTE_IF_SET_IN_BITMAP (bbs_to_scale, 0, i, bi)
1398 scale_bbs_frequencies_int (new_bbs + i, 1, scale_after_exit,
1399 REG_BR_PROB_BASE);
1404 /* Record the first copy in the control flow order if it is not
1405 the original loop (i.e. in case of peeling). */
1406 if (!first_active_latch)
1408 memcpy (first_active, new_bbs, n * sizeof (basic_block));
1409 first_active_latch = new_bbs[n - 1];
1412 /* Set counts and frequencies. */
1413 if (flags & DLTHE_FLAG_UPDATE_FREQ)
1415 scale_bbs_frequencies_int (new_bbs, n, scale_act, REG_BR_PROB_BASE);
1416 scale_act = combine_probabilities (scale_act, scale_step[j]);
1419 free (new_bbs);
1420 free (orig_loops);
1422 /* Record the exit edge in the original loop body, and update the frequencies. */
1423 if (orig && bitmap_bit_p (wont_exit, 0))
1425 if (to_remove)
1426 to_remove->safe_push (orig);
1427 set_zero_probability (orig);
1429 /* Scale the frequencies of the blocks dominated by the exit. */
1430 if (bbs_to_scale)
1432 EXECUTE_IF_SET_IN_BITMAP (bbs_to_scale, 0, i, bi)
1434 scale_bbs_frequencies_int (bbs + i, 1, scale_after_exit,
1435 REG_BR_PROB_BASE);
1440 /* Update the original loop. */
1441 if (!is_latch)
1442 set_immediate_dominator (CDI_DOMINATORS, e->dest, e->src);
1443 if (flags & DLTHE_FLAG_UPDATE_FREQ)
1445 scale_bbs_frequencies_int (bbs, n, scale_main, REG_BR_PROB_BASE);
1446 free (scale_step);
1449 /* Update dominators of outer blocks if affected. */
1450 for (i = 0; i < n; i++)
1452 basic_block dominated, dom_bb;
1453 vec<basic_block> dom_bbs;
1454 unsigned j;
1456 bb = bbs[i];
1457 bb->aux = 0;
1459 dom_bbs = get_dominated_by (CDI_DOMINATORS, bb);
1460 FOR_EACH_VEC_ELT (dom_bbs, j, dominated)
1462 if (flow_bb_inside_loop_p (loop, dominated))
1463 continue;
1464 dom_bb = nearest_common_dominator (
1465 CDI_DOMINATORS, first_active[i], first_active_latch);
1466 set_immediate_dominator (CDI_DOMINATORS, dominated, dom_bb);
1468 dom_bbs.release ();
1470 free (first_active);
1472 free (bbs);
1473 BITMAP_FREE (bbs_to_scale);
1475 return true;
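
/* Illustrative sketch (editorial addition): an unroller duplicating the loop
   body NUNROLL times onto the latch edge typically does something like

     sbitmap wont_exit = sbitmap_alloc (nunroll + 1);
     bitmap_ones (wont_exit);
     bitmap_clear_bit (wont_exit, 0);
     vec<edge> to_remove = vNULL;
     bool ok = duplicate_loop_to_header_edge (loop, loop_latch_edge (loop),
                                              nunroll, wont_exit, exit_edge,
                                              &to_remove,
                                              DLTHE_FLAG_UPDATE_FREQ);
     sbitmap_free (wont_exit);

   nunroll and exit_edge are hypothetical values computed by the caller; the
   edges collected in to_remove are afterwards deleted with remove_path, and
   to_remove must be released.  */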
1478 /* A callback for make_forwarder_block, to redirect all edges except for
1479 MFB_KJ_EDGE to the entry part. E is the edge for which we should decide
1480 whether to redirect it. */
1482 edge mfb_kj_edge;
1483 bool
1484 mfb_keep_just (edge e)
1486 return e != mfb_kj_edge;
1489 /* True when a candidate preheader BLOCK has predecessors from LOOP. */
1491 static bool
1492 has_preds_from_loop (basic_block block, struct loop *loop)
1494 edge e;
1495 edge_iterator ei;
1497 FOR_EACH_EDGE (e, ei, block->preds)
1498 if (e->src->loop_father == loop)
1499 return true;
1500 return false;
1503 /* Creates a pre-header for LOOP. Returns the newly created block, or NULL
1504 if none was needed. Unless CP_SIMPLE_PREHEADERS is set in FLAGS, we only force
1505 LOOP to have a single entry; otherwise we also force the preheader block to have only one successor.
1506 When CP_FALLTHRU_PREHEADERS is set in FLAGS, we force the preheader block
1507 to be a fallthru predecessor to the loop header and to have only
1508 predecessors from outside of the loop.
1509 The function also updates dominators. */
1511 basic_block
1512 create_preheader (struct loop *loop, int flags)
1514 edge e, fallthru;
1515 basic_block dummy;
1516 int nentry = 0;
1517 bool irred = false;
1518 bool latch_edge_was_fallthru;
1519 edge one_succ_pred = NULL, single_entry = NULL;
1520 edge_iterator ei;
1522 FOR_EACH_EDGE (e, ei, loop->header->preds)
1524 if (e->src == loop->latch)
1525 continue;
1526 irred |= (e->flags & EDGE_IRREDUCIBLE_LOOP) != 0;
1527 nentry++;
1528 single_entry = e;
1529 if (single_succ_p (e->src))
1530 one_succ_pred = e;
1532 gcc_assert (nentry);
1533 if (nentry == 1)
1535 bool need_forwarder_block = false;
1537 /* We do not allow entry block to be the loop preheader, since we
1538 cannot emit code there. */
1539 if (single_entry->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1540 need_forwarder_block = true;
1541 else
1543 /* If we want simple preheaders, also force the preheader to have
1544 just a single successor. */
1545 if ((flags & CP_SIMPLE_PREHEADERS)
1546 && !single_succ_p (single_entry->src))
1547 need_forwarder_block = true;
1548 /* If we want fallthru preheaders, also create forwarder block when
1549 preheader ends with a jump or has predecessors from loop. */
1550 else if ((flags & CP_FALLTHRU_PREHEADERS)
1551 && (JUMP_P (BB_END (single_entry->src))
1552 || has_preds_from_loop (single_entry->src, loop)))
1553 need_forwarder_block = true;
1555 if (! need_forwarder_block)
1556 return NULL;
1559 mfb_kj_edge = loop_latch_edge (loop);
1560 latch_edge_was_fallthru = (mfb_kj_edge->flags & EDGE_FALLTHRU) != 0;
1561 fallthru = make_forwarder_block (loop->header, mfb_keep_just, NULL);
1562 dummy = fallthru->src;
1563 loop->header = fallthru->dest;
1565 /* Try to be clever in placing the newly created preheader. The idea is to
1566 avoid breaking any "fallthruness" relationship between blocks.
1568 The preheader was created just before the header and all incoming edges
1569 to the header were redirected to the preheader, except the latch edge.
1570 So the only problematic case is when this latch edge was a fallthru
1571 edge: it is not anymore after the preheader creation so we have broken
1572 the fallthruness. We're therefore going to look for a better place. */
1573 if (latch_edge_was_fallthru)
1575 if (one_succ_pred)
1576 e = one_succ_pred;
1577 else
1578 e = EDGE_PRED (dummy, 0);
1580 move_block_after (dummy, e->src);
1583 if (irred)
1585 dummy->flags |= BB_IRREDUCIBLE_LOOP;
1586 single_succ_edge (dummy)->flags |= EDGE_IRREDUCIBLE_LOOP;
1589 if (dump_file)
1590 fprintf (dump_file, "Created preheader block for loop %i\n",
1591 loop->num);
1593 if (flags & CP_FALLTHRU_PREHEADERS)
1594 gcc_assert ((single_succ_edge (dummy)->flags & EDGE_FALLTHRU)
1595 && !JUMP_P (BB_END (dummy)));
1597 return dummy;
1600 /* Create preheaders for each loop; for meaning of FLAGS see create_preheader. */
1602 void
1603 create_preheaders (int flags)
1605 struct loop *loop;
1607 if (!current_loops)
1608 return;
1610 FOR_EACH_LOOP (loop, 0)
1611 create_preheader (loop, flags);
1612 loops_state_set (LOOPS_HAVE_PREHEADERS);
1615 /* Forces all loop latches to have only single successor. */
1617 void
1618 force_single_succ_latches (void)
1620 struct loop *loop;
1621 edge e;
1623 FOR_EACH_LOOP (loop, 0)
1625 if (loop->latch != loop->header && single_succ_p (loop->latch))
1626 continue;
1628 e = find_edge (loop->latch, loop->header);
1629 gcc_checking_assert (e != NULL);
1631 split_edge (e);
1633 loops_state_set (LOOPS_HAVE_SIMPLE_LATCHES);
1636 /* This function is called from loop_version. It splits the entry edge
1637 of the loop we want to version, adds the versioning condition, and
1638 adjusts the edges to the two versions of the loop appropriately.
1639 E is the incoming edge. Returns the basic block containing the
1640 condition.
1642 --- edge e ---- > [second_head]
1644 Split it and insert new conditional expression and adjust edges.
1646 --- edge e ---> [cond expr] ---> [first_head]
1648 +---------> [second_head]
1650 THEN_PROB is the probability of the then branch of the condition. */
1652 static basic_block
1653 lv_adjust_loop_entry_edge (basic_block first_head, basic_block second_head,
1654 edge e, void *cond_expr, unsigned then_prob)
1656 basic_block new_head = NULL;
1657 edge e1;
1659 gcc_assert (e->dest == second_head);
1661 /* Split edge 'e'. This will create a new basic block, where we can
1662 insert conditional expr. */
1663 new_head = split_edge (e);
1665 lv_add_condition_to_bb (first_head, second_head, new_head,
1666 cond_expr);
1668 /* Don't set EDGE_TRUE_VALUE in RTL mode, as it's invalid there. */
1669 e = single_succ_edge (new_head);
1670 e1 = make_edge (new_head, first_head,
1671 current_ir_type () == IR_GIMPLE ? EDGE_TRUE_VALUE : 0);
1672 e1->probability = then_prob;
1673 e->probability = REG_BR_PROB_BASE - then_prob;
1674 e1->count = apply_probability (e->count, e1->probability);
1675 e->count = apply_probability (e->count, e->probability);
1677 set_immediate_dominator (CDI_DOMINATORS, first_head, new_head);
1678 set_immediate_dominator (CDI_DOMINATORS, second_head, new_head);
1680 /* Adjust loop header phi nodes. */
1681 lv_adjust_loop_header_phi (first_head, second_head, new_head, e1);
1683 return new_head;
1686 /* Main entry point for Loop Versioning transformation.
1688 This transformation, given a condition and a loop, creates
1689 if (condition) { loop_copy1 } else { loop_copy2 },
1690 where loop_copy1 is the loop transformed in one way, and loop_copy2
1691 is the loop transformed in another way (or unchanged). 'condition'
1692 may be a run time test for things that were not resolved by static
1693 analysis (overlapping ranges (anti-aliasing), alignment, etc.).
1695 THEN_PROB is the probability of the then edge of the if. THEN_SCALE
1696 is the ratio by which the frequencies in the original loop should
1697 be scaled. ELSE_SCALE is the ratio by which the frequencies in the
1698 new loop should be scaled.
1700 If PLACE_AFTER is true, we place the new loop after LOOP in the
1701 instruction stream, otherwise it is placed before LOOP. */
1703 struct loop *
1704 loop_version (struct loop *loop,
1705 void *cond_expr, basic_block *condition_bb,
1706 unsigned then_prob, unsigned then_scale, unsigned else_scale,
1707 bool place_after)
1709 basic_block first_head, second_head;
1710 edge entry, latch_edge, true_edge, false_edge;
1711 int irred_flag;
1712 struct loop *nloop;
1713 basic_block cond_bb;
1715 /* Record entry and latch edges for the loop */
1716 entry = loop_preheader_edge (loop);
1717 irred_flag = entry->flags & EDGE_IRREDUCIBLE_LOOP;
1718 entry->flags &= ~EDGE_IRREDUCIBLE_LOOP;
1720 /* Note down head of loop as first_head. */
1721 first_head = entry->dest;
1723 /* Duplicate loop. */
1724 if (!cfg_hook_duplicate_loop_to_header_edge (loop, entry, 1,
1725 NULL, NULL, NULL, 0))
1727 entry->flags |= irred_flag;
1728 return NULL;
1731 /* After duplication entry edge now points to new loop head block.
1732 Note down new head as second_head. */
1733 second_head = entry->dest;
1735 /* Split loop entry edge and insert new block with cond expr. */
1736 cond_bb = lv_adjust_loop_entry_edge (first_head, second_head,
1737 entry, cond_expr, then_prob);
1738 if (condition_bb)
1739 *condition_bb = cond_bb;
1741 if (!cond_bb)
1743 entry->flags |= irred_flag;
1744 return NULL;
1747 latch_edge = single_succ_edge (get_bb_copy (loop->latch));
1749 extract_cond_bb_edges (cond_bb, &true_edge, &false_edge);
1750 nloop = loopify (latch_edge,
1751 single_pred_edge (get_bb_copy (loop->header)),
1752 cond_bb, true_edge, false_edge,
1753 false /* Do not redirect all edges. */,
1754 then_scale, else_scale);
1756 copy_loop_info (loop, nloop);
1758 /* loopify redirected latch_edge. Update its PENDING_STMTS. */
1759 lv_flush_pending_stmts (latch_edge);
1761 /* loopify redirected condition_bb's succ edge. Update its PENDING_STMTS. */
1762 extract_cond_bb_edges (cond_bb, &true_edge, &false_edge);
1763 lv_flush_pending_stmts (false_edge);
1764 /* Adjust irreducible flag. */
1765 if (irred_flag)
1767 cond_bb->flags |= BB_IRREDUCIBLE_LOOP;
1768 loop_preheader_edge (loop)->flags |= EDGE_IRREDUCIBLE_LOOP;
1769 loop_preheader_edge (nloop)->flags |= EDGE_IRREDUCIBLE_LOOP;
1770 single_pred_edge (cond_bb)->flags |= EDGE_IRREDUCIBLE_LOOP;
1773 if (place_after)
1775 basic_block *bbs = get_loop_body_in_dom_order (nloop), after;
1776 unsigned i;
1778 after = loop->latch;
1780 for (i = 0; i < nloop->num_nodes; i++)
1782 move_block_after (bbs[i], after);
1783 after = bbs[i];
1785 free (bbs);
1788 /* At this point condition_bb is loop preheader with two successors,
1789 first_head and second_head. Make sure that loop preheader has only
1790 one successor. */
1791 split_edge (loop_preheader_edge (loop));
1792 split_edge (loop_preheader_edge (nloop));
1794 return nloop;
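
/* Illustrative sketch (editorial addition): a pass that wants a run-time
   guarded copy of LOOP, e.g. for unswitching or alias versioning, calls

     basic_block cond_bb;
     struct loop *nloop
       = loop_version (loop, unshare_expr (cond), &cond_bb,
                       REG_BR_PROB_BASE / 2, REG_BR_PROB_BASE / 2,
                       REG_BR_PROB_BASE / 2, true);

   cond is a hypothetical condition built by the caller; the 50/50
   probability and scales are placeholder values, and passing true for
   PLACE_AFTER puts the new copy after LOOP in the instruction stream.  */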