/* Loop manipulation code for GNU compiler.
   Copyright (C) 2002-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "cfganal.h"
#include "cfgloop.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-ssa-loop-manip.h"
#include "dumpfile.h"
static void copy_loops_to (struct loop **, int,
                           struct loop *);
static void loop_redirect_edge (edge, basic_block);
static void remove_bbs (basic_block *, int);
static bool rpe_enum_p (const_basic_block, const void *);
static int find_path (edge, basic_block **);
static void fix_loop_placements (struct loop *, bool *);
static bool fix_bb_placement (basic_block);
static void fix_bb_placements (basic_block, bool *, bitmap);
/* Checks whether basic block BB is dominated by DATA.  */
static bool
rpe_enum_p (const_basic_block bb, const void *data)
{
  return dominated_by_p (CDI_DOMINATORS, bb, (const_basic_block) data);
}
/* Remove basic blocks BBS.  NBBS is the number of the basic blocks.  */

static void
remove_bbs (basic_block *bbs, int nbbs)
{
  int i;

  for (i = 0; i < nbbs; i++)
    delete_basic_block (bbs[i]);
}
/* Find path -- i.e. the basic blocks dominated by edge E and put them
   into array BBS, that will be allocated large enough to contain them.
   E->dest must have exactly one predecessor for this to work (it is
   easy to achieve and we do not put it here because we do not want to
   alter anything by this function).  The number of basic blocks in the
   path is returned.  */
static int
find_path (edge e, basic_block **bbs)
{
  gcc_assert (EDGE_COUNT (e->dest->preds) <= 1);

  /* Find bbs in the path.  */
  *bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  return dfs_enumerate_from (e->dest, 0, rpe_enum_p, *bbs,
                             n_basic_blocks_for_fn (cfun), e->dest);
}
/* Fix placement of basic block BB inside loop hierarchy --
   Let L be the loop to which BB belongs.  Then every successor of BB must
   either
     1) belong to some superloop of loop L, or
     2) be a header of loop K such that K->outer is superloop of L
   Returns true if we had to move BB into another loop to enforce this
   condition, false if the placement of BB was already correct (provided
   that placements of its successors are correct).  */
static bool
fix_bb_placement (basic_block bb)
{
  edge e;
  edge_iterator ei;
  struct loop *loop = current_loops->tree_root, *act;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
        continue;

      act = e->dest->loop_father;
      if (act->header == e->dest)
        act = loop_outer (act);

      if (flow_loop_nested_p (loop, act))
        loop = act;
    }

  if (loop == bb->loop_father)
    return false;

  remove_bb_from_loops (bb);
  add_bb_to_loop (bb, loop);

  return true;
}
/* Fix placement of LOOP inside loop tree, i.e. find the innermost superloop
   of LOOP into which at least one exit edge of LOOP leads, and set it
   as the immediate superloop of LOOP.  Return true if the immediate superloop
   of LOOP changed.

   IRRED_INVALIDATED is set to true if a change in the loop structures might
   invalidate the information about irreducible regions.  */

static bool
fix_loop_placement (struct loop *loop, bool *irred_invalidated)
{
  unsigned i;
  edge e;
  vec<edge> exits = get_loop_exit_edges (loop);
  struct loop *father = current_loops->tree_root, *act;
  bool ret = false;

  FOR_EACH_VEC_ELT (exits, i, e)
    {
      act = find_common_loop (loop, e->dest->loop_father);
      if (flow_loop_nested_p (father, act))
        father = act;
    }

  if (father != loop_outer (loop))
    {
      for (act = loop_outer (loop); act != father; act = loop_outer (act))
        act->num_nodes -= loop->num_nodes;
      flow_loop_tree_node_remove (loop);
      flow_loop_tree_node_add (father, loop);

      /* The exit edges of LOOP no longer exit their original immediate
         superloops; remove them from the appropriate exit lists.  */
      FOR_EACH_VEC_ELT (exits, i, e)
        {
          /* We may need to recompute irreducible loops.  */
          if (e->flags & EDGE_IRREDUCIBLE_LOOP)
            *irred_invalidated = true;
          rescan_loop_exit (e, false, false);
        }

      ret = true;
    }

  exits.release ();
  return ret;
}
/* Fix placements of basic blocks inside loop hierarchy stored in loops; i.e.
   enforce condition stated in description of fix_bb_placement.  We
   start from basic block FROM that had some of its successors removed, so that
   its placement need no longer be correct, and iteratively fix placement of
   its predecessors that may change if placement of FROM changed.  Also fix
   placement of subloops of FROM->loop_father, which might also be altered due
   to this change; the condition for them is similar, except that instead of
   successors we consider edges coming out of the loops.

   If the changes may invalidate the information about irreducible regions,
   IRRED_INVALIDATED is set to true.

   If LOOP_CLOSED_SSA_INVALIDATED is non-zero then all basic blocks with
   changed loop_father are collected there.  */

static void
fix_bb_placements (basic_block from,
                   bool *irred_invalidated,
                   bitmap loop_closed_ssa_invalidated)
{
  basic_block *queue, *qtop, *qbeg, *qend;
  struct loop *base_loop, *target_loop;
  edge e;

  /* We pass through blocks back-reachable from FROM, testing whether some
     of their successors moved to outer loop.  It may be necessary to
     iterate several times, but it is finite, as we stop unless we move
     the basic block up the loop structure.  The whole story is a bit
     more complicated due to presence of subloops, those are moved using
     fix_loop_placement.  */

  base_loop = from->loop_father;
  /* If we are already in the outermost loop, the basic blocks cannot be moved
     outside of it.  If FROM is the header of the base loop, it cannot be moved
     outside of it, either.  In both cases, we can end now.  */
  if (base_loop == current_loops->tree_root
      || from == base_loop->header)
    return;

  auto_sbitmap in_queue (last_basic_block_for_fn (cfun));
  bitmap_clear (in_queue);
  bitmap_set_bit (in_queue, from->index);
  /* Prevent us from going out of the base_loop.  */
  bitmap_set_bit (in_queue, base_loop->header->index);

  queue = XNEWVEC (basic_block, base_loop->num_nodes + 1);
  qtop = queue + base_loop->num_nodes + 1;
  qbeg = queue;
  qend = queue + 1;
  *qbeg = from;
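
  /* QUEUE is used as a circular worklist of at most num_nodes + 1 blocks;
     QBEG and QEND wrap around at QTOP.  IN_QUEUE guards against scheduling
     a block twice, and the base loop's header is pre-marked above so the
     search never leaves BASE_LOOP.  */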
  while (qbeg != qend)
    {
      edge_iterator ei;
      from = *qbeg;
      qbeg++;
      if (qbeg == qtop)
        qbeg = queue;
      bitmap_clear_bit (in_queue, from->index);

      if (from->loop_father->header == from)
        {
          /* Subloop header, maybe move the loop upward.  */
          if (!fix_loop_placement (from->loop_father, irred_invalidated))
            continue;
          target_loop = loop_outer (from->loop_father);
          if (loop_closed_ssa_invalidated)
            {
              basic_block *bbs = get_loop_body (from->loop_father);
              for (unsigned i = 0; i < from->loop_father->num_nodes; ++i)
                bitmap_set_bit (loop_closed_ssa_invalidated, bbs[i]->index);
              free (bbs);
            }
        }
      else
        {
          /* Ordinary basic block.  */
          if (!fix_bb_placement (from))
            continue;
          target_loop = from->loop_father;
          if (loop_closed_ssa_invalidated)
            bitmap_set_bit (loop_closed_ssa_invalidated, from->index);
        }

      FOR_EACH_EDGE (e, ei, from->succs)
        {
          if (e->flags & EDGE_IRREDUCIBLE_LOOP)
            *irred_invalidated = true;
        }

      /* Something has changed, insert predecessors into queue.  */
      FOR_EACH_EDGE (e, ei, from->preds)
        {
          basic_block pred = e->src;
          struct loop *nca;

          if (e->flags & EDGE_IRREDUCIBLE_LOOP)
            *irred_invalidated = true;

          if (bitmap_bit_p (in_queue, pred->index))
            continue;

          /* If it is a subloop, then either it was not moved, or
             the path up the loop tree from BASE_LOOP does not contain
             it.  */
          nca = find_common_loop (pred->loop_father, base_loop);
          if (pred->loop_father != base_loop
              && (nca == base_loop
                  || nca != pred->loop_father))
            pred = pred->loop_father->header;
          else if (!flow_loop_nested_p (target_loop, pred->loop_father))
            {
              /* If PRED is already higher in the loop hierarchy than the
                 TARGET_LOOP to which we moved FROM, the change of the position
                 of FROM does not affect the position of PRED, so there is no
                 point in processing it.  */
              continue;
            }

          if (bitmap_bit_p (in_queue, pred->index))
            continue;

          /* Schedule the basic block.  */
          *qend = pred;
          qend++;
          if (qend == qtop)
            qend = queue;
          bitmap_set_bit (in_queue, pred->index);
        }
    }
  free (queue);
}
/* Removes the path beginning at edge E, i.e. removes the basic blocks
   dominated by E and updates loop structures and dominators.  Return true if
   we were able to remove the path, false otherwise (and nothing is affected
   then).  */
bool
remove_path (edge e, bool *irred_invalidated,
             bitmap loop_closed_ssa_invalidated)
{
  edge ae;
  basic_block *rem_bbs, *bord_bbs, from, bb;
  vec<basic_block> dom_bbs;
  int i, nrem, n_bord_bbs;
  bool local_irred_invalidated = false;
  edge_iterator ei;
  struct loop *l, *f;

  if (! irred_invalidated)
    irred_invalidated = &local_irred_invalidated;

  if (!can_remove_branch_p (e))
    return false;

  /* Keep track of whether we need to update information about irreducible
     regions.  This is the case if the removed area is a part of the
     irreducible region, or if the set of basic blocks that belong to a loop
     that is inside an irreducible region is changed, or if such a loop is
     removed.  */
  if (e->flags & EDGE_IRREDUCIBLE_LOOP)
    *irred_invalidated = true;

  /* We need to check whether basic blocks are dominated by the edge
     E, but we only have basic block dominators.  This is easy to
     fix -- when E->dest has exactly one predecessor, this corresponds
     to blocks dominated by E->dest, if not, split the edge.  */
  if (!single_pred_p (e->dest))
    e = single_pred_edge (split_edge (e));

  /* It may happen that by removing the path we remove one or more loops
     we belong to.  In this case first unloop the loops, then proceed
     normally.  We may assume that e->dest is not a header of any loop,
     as it now has exactly one predecessor.  */
  for (l = e->src->loop_father; loop_outer (l); l = f)
    {
      f = loop_outer (l);
      if (dominated_by_p (CDI_DOMINATORS, l->latch, e->dest))
        unloop (l, irred_invalidated, loop_closed_ssa_invalidated);
    }

  /* Identify the path.  */
  nrem = find_path (e, &rem_bbs);

  n_bord_bbs = 0;
  bord_bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  auto_sbitmap seen (last_basic_block_for_fn (cfun));
  bitmap_clear (seen);

  /* Find "border" blocks -- i.e. those with predecessor in removed path.  */
  for (i = 0; i < nrem; i++)
    bitmap_set_bit (seen, rem_bbs[i]->index);
  if (!*irred_invalidated)
    FOR_EACH_EDGE (ae, ei, e->src->succs)
      if (ae != e && ae->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
          && !bitmap_bit_p (seen, ae->dest->index)
          && ae->flags & EDGE_IRREDUCIBLE_LOOP)
        {
          *irred_invalidated = true;
          break;
        }

  for (i = 0; i < nrem; i++)
    {
      bb = rem_bbs[i];
      FOR_EACH_EDGE (ae, ei, rem_bbs[i]->succs)
        if (ae->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
            && !bitmap_bit_p (seen, ae->dest->index))
          {
            bitmap_set_bit (seen, ae->dest->index);
            bord_bbs[n_bord_bbs++] = ae->dest;

            if (ae->flags & EDGE_IRREDUCIBLE_LOOP)
              *irred_invalidated = true;
          }
    }
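
  /* SEEN now contains both the blocks on the removed path and the border
     blocks found so far, so each border block is recorded in BORD_BBS
     exactly once.  */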
  /* Remove the path.  */
  from = e->src;
  remove_branch (e);
  dom_bbs.create (0);

  /* Cancel loops contained in the path.  */
  for (i = 0; i < nrem; i++)
    if (rem_bbs[i]->loop_father->header == rem_bbs[i])
      cancel_loop_tree (rem_bbs[i]->loop_father);

  remove_bbs (rem_bbs, nrem);
  free (rem_bbs);

  /* Find blocks whose dominators may be affected.  */
  bitmap_clear (seen);
  for (i = 0; i < n_bord_bbs; i++)
    {
      basic_block ldom;

      bb = get_immediate_dominator (CDI_DOMINATORS, bord_bbs[i]);
      if (bitmap_bit_p (seen, bb->index))
        continue;
      bitmap_set_bit (seen, bb->index);

      for (ldom = first_dom_son (CDI_DOMINATORS, bb);
           ldom;
           ldom = next_dom_son (CDI_DOMINATORS, ldom))
        if (!dominated_by_p (CDI_DOMINATORS, from, ldom))
          dom_bbs.safe_push (ldom);
    }

  /* Recount dominators.  */
  iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, true);
  dom_bbs.release ();
  free (bord_bbs);

  /* Fix placements of basic blocks inside loops and the placement of
     loops in the loop tree.  */
  fix_bb_placements (from, irred_invalidated, loop_closed_ssa_invalidated);
  fix_loop_placements (from->loop_father, irred_invalidated);

  if (local_irred_invalidated
      && loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS))
    mark_irreducible_loops ();

  return true;
}
/* Creates place for a new LOOP in loops structure of FN.  */

void
place_new_loop (struct function *fn, struct loop *loop)
{
  loop->num = number_of_loops (fn);
  vec_safe_push (loops_for_fn (fn)->larray, loop);
}
/* Given LOOP structure with filled header and latch, find the body of the
   corresponding loop and add it to the loop tree.  Insert the LOOP as a son
   of OUTER.  */

void
add_loop (struct loop *loop, struct loop *outer)
{
  basic_block *bbs;
  int i, n;
  struct loop *subloop;
  edge e;
  edge_iterator ei;

  /* Add it to loop structure.  */
  place_new_loop (cfun, loop);
  flow_loop_tree_node_add (outer, loop);

  /* Find its nodes.  */
  bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  n = get_loop_body_with_size (loop, bbs, n_basic_blocks_for_fn (cfun));

  for (i = 0; i < n; i++)
    {
      if (bbs[i]->loop_father == outer)
        {
          remove_bb_from_loops (bbs[i]);
          add_bb_to_loop (bbs[i], loop);
          continue;
        }

      loop->num_nodes++;

      /* If we find a direct subloop of OUTER, move it to LOOP.  */
      subloop = bbs[i]->loop_father;
      if (loop_outer (subloop) == outer
          && subloop->header == bbs[i])
        {
          flow_loop_tree_node_remove (subloop);
          flow_loop_tree_node_add (loop, subloop);
        }
    }

  /* Update the information about loop exit edges.  */
  for (i = 0; i < n; i++)
    {
      FOR_EACH_EDGE (e, ei, bbs[i]->succs)
        {
          rescan_loop_exit (e, false, false);
        }
    }

  free (bbs);
}
/* Scale profile of loop by P.  */

void
scale_loop_frequencies (struct loop *loop, profile_probability p)
{
  basic_block *bbs;

  bbs = get_loop_body (loop);
  scale_bbs_frequencies (bbs, loop->num_nodes, p);
  free (bbs);
}
/* Scale profile in LOOP by P.
   If ITERATION_BOUND is non-zero, scale even further if loop is predicted
   to iterate too many times.  */

void
scale_loop_profile (struct loop *loop, profile_probability p,
                    gcov_type iteration_bound)
{
  gcov_type iterations = expected_loop_iterations_unbounded (loop);
  edge e;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, ";; Scaling loop %i with scale ",
               loop->num);
      p.dump (dump_file);
      fprintf (dump_file, " bounding iterations to %i from guessed %i\n",
               (int)iteration_bound, (int)iterations);
    }

  /* See if loop is predicted to iterate too many times.  */
  if (iteration_bound && iterations > 0
      && p.apply (iterations) > iteration_bound)
    {
      /* Fixing loop profile for different trip count is not trivial; the exit
         probabilities have to be updated to match and frequencies propagated
         down to the loop body.

         We fully update only the simple case of loop with single exit that is
         either from the latch or BB just before latch and leads from BB with
         simple conditional jump.  This is OK for use in vectorizer.  */
      e = single_exit (loop);
      if (e)
        {
          edge other_e;
          int freq_delta;
          profile_count count_delta;

          FOR_EACH_EDGE (other_e, ei, e->src->succs)
            if (!(other_e->flags & (EDGE_ABNORMAL | EDGE_FAKE))
                && e != other_e)
              break;

          /* Probability of exit must be 1/iterations.  */
          freq_delta = EDGE_FREQUENCY (e);
          e->probability = profile_probability::from_reg_br_prob_base
                             (REG_BR_PROB_BASE / iteration_bound);
          other_e->probability = e->probability.invert ();
          freq_delta -= EDGE_FREQUENCY (e);

          /* Adjust counts accordingly.  */
          count_delta = e->count;
          e->count = e->src->count.apply_probability (e->probability);
          other_e->count = e->src->count.apply_probability (other_e->probability);
          count_delta -= e->count;
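
          /* FREQ_DELTA and COUNT_DELTA now hold how much flow was removed
             from the exit edge; the latch absorbs it below.  */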
          /* If latch exists, change its frequency and count, since we changed
             probability of exit.  Theoretically we should update everything from
             source of exit edge to latch, but for vectorizer this is enough.  */
          if (loop->latch
              && loop->latch != e->src)
            {
              loop->latch->frequency += freq_delta;
              if (loop->latch->frequency < 0)
                loop->latch->frequency = 0;
              loop->latch->count += count_delta;
            }
        }

      /* Roughly speaking we want to reduce the loop body profile by the
         difference of loop iterations.  We however can do better if
         we look at the actual profile, if it is available.  */
      p = p.apply_scale (iteration_bound, iterations);

      bool determined = false;
      if (loop->header->count.initialized_p ())
        {
          profile_count count_in = profile_count::zero ();

          FOR_EACH_EDGE (e, ei, loop->header->preds)
            if (e->src != loop->latch)
              count_in += e->count;

          if (count_in > profile_count::zero ())
            {
              p = count_in.probability_in (loop->header->count.apply_scale
                                             (iteration_bound, 1));
              determined = true;
            }
        }
      if (!determined)
        {
          int freq_in = 0;

          FOR_EACH_EDGE (e, ei, loop->header->preds)
            if (e->src != loop->latch)
              freq_in += EDGE_FREQUENCY (e);

          if (freq_in != 0)
            p = profile_probability::probability_in_gcov_type
                  (freq_in * iteration_bound, loop->header->frequency);

          if (!(p > profile_probability::never ()))
            p = profile_probability::very_unlikely ();
        }

      if (p >= profile_probability::always ()
          || !p.initialized_p ())
        return;
    }

  /* Scale the actual probabilities.  */
  scale_loop_frequencies (loop, p);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, ";; guessed iterations are now %i\n",
             (int)expected_loop_iterations_unbounded (loop));
}
/* Recompute dominance information for basic blocks outside LOOP.  */

static void
update_dominators_in_loop (struct loop *loop)
{
  vec<basic_block> dom_bbs = vNULL;
  basic_block *body;
  unsigned i;

  auto_sbitmap seen (last_basic_block_for_fn (cfun));
  bitmap_clear (seen);
  body = get_loop_body (loop);

  for (i = 0; i < loop->num_nodes; i++)
    bitmap_set_bit (seen, body[i]->index);
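
  /* SEEN now contains exactly the blocks of the loop body; any block that
     is immediately dominated by a body block but is not itself in the body
     lies outside LOOP and gets its dominator recomputed below.  */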
  for (i = 0; i < loop->num_nodes; i++)
    {
      basic_block ldom;

      for (ldom = first_dom_son (CDI_DOMINATORS, body[i]);
           ldom;
           ldom = next_dom_son (CDI_DOMINATORS, ldom))
        if (!bitmap_bit_p (seen, ldom->index))
          {
            bitmap_set_bit (seen, ldom->index);
            dom_bbs.safe_push (ldom);
          }
    }

  iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, false);
  free (body);
  dom_bbs.release ();
}
/* Creates an if region as shown below.  CONDITION is used to create
   the test for the if.

   |
   |     -------------                 -------------
   |     |  pred_bb  |                 |  pred_bb  |
   |     -------------                 -------------
   |           |                             |
   |           |                             | ENTRY_EDGE
   |           | ENTRY_EDGE                  V
   |           |             ====>     -------------
   |           |                       |  cond_bb  |
   |           |                       | CONDITION |
   |           |                       -------------
   |           V                        /         \
   |     -------------         e_false /           \ e_true
   |     |  succ_bb  |                V             V
   |     -------------         -----------       -----------
   |                           | false_bb |      | true_bb |
   |                           -----------       -----------
   |                                   \           /
   |                                    \         /
   |                                     V       V
   |                                   -------------
   |                                   |  join_bb  |
   |                                   -------------
   |                                         | exit_edge (result)
   |                                         V
   |                                    -----------
   |                                    | succ_bb |
   |                                    -----------
   |
 */
edge
create_empty_if_region_on_edge (edge entry_edge, tree condition)
{
  basic_block cond_bb, true_bb, false_bb, join_bb;
  edge e_true, e_false, exit_edge;
  gcond *cond_stmt;
  tree simple_cond;
  gimple_stmt_iterator gsi;

  cond_bb = split_edge (entry_edge);

  /* Insert condition in cond_bb.  */
  gsi = gsi_last_bb (cond_bb);
  simple_cond =
    force_gimple_operand_gsi (&gsi, condition, true, NULL,
                              false, GSI_NEW_STMT);
  cond_stmt = gimple_build_cond_from_tree (simple_cond, NULL_TREE, NULL_TREE);
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);

  join_bb = split_edge (single_succ_edge (cond_bb));

  e_true = single_succ_edge (cond_bb);
  true_bb = split_edge (e_true);

  e_false = make_edge (cond_bb, join_bb, 0);
  false_bb = split_edge (e_false);

  e_true->flags &= ~EDGE_FALLTHRU;
  e_true->flags |= EDGE_TRUE_VALUE;
  e_false->flags &= ~EDGE_FALLTHRU;
  e_false->flags |= EDGE_FALSE_VALUE;

  set_immediate_dominator (CDI_DOMINATORS, cond_bb, entry_edge->src);
  set_immediate_dominator (CDI_DOMINATORS, true_bb, cond_bb);
  set_immediate_dominator (CDI_DOMINATORS, false_bb, cond_bb);
  set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);

  exit_edge = single_succ_edge (join_bb);

  if (single_pred_p (exit_edge->dest))
    set_immediate_dominator (CDI_DOMINATORS, exit_edge->dest, join_bb);

  return exit_edge;
}
/* create_empty_loop_on_edge
   |
   |    - pred_bb -                   ------ pred_bb ------
   |   |           |                 | iv0 = initial_value |
   |    -----|-----                   ---------|-----------
   |         |                       ______    | entry_edge
   |         | entry_edge           /      |   |
   |         |             ====>   |      -V---V- loop_header -------------
   |         V                     |     | iv_before = phi (iv0, iv_after) |
   |    - succ_bb -                |      ---|-----------------------------
   |   |           |               |         |
   |    -----------                |      ---V--- loop_body ---------------
   |                               |     | iv_after = iv_before + stride   |
   |                               |     | if (iv_before < upper_bound)    |
   |                               |      ---|--------------\--------------
   |                               |         |               \ exit_e
   |                               |         V                \
   |                               |  - loop_latch -            V- succ_bb -
   |                               |  |             |          |           |
   |                               |   /-------------           -----------
   |                                \ ___ /

   Creates an empty loop as shown above; IV_BEFORE is the SSA_NAME
   that is used before the increment of IV.  IV_BEFORE should be used for
   adding code to the body that uses the IV.  OUTER is the outer loop in
   which the new loop should be inserted.

   Both INITIAL_VALUE and UPPER_BOUND expressions are gimplified and
   inserted on the loop entry edge.  This implies that this function
   should be used only when the UPPER_BOUND expression is a loop
   invariant.  */
struct loop *
create_empty_loop_on_edge (edge entry_edge,
                           tree initial_value,
                           tree stride, tree upper_bound,
                           tree iv,
                           tree *iv_before,
                           tree *iv_after,
                           struct loop *outer)
{
  basic_block loop_header, loop_latch, succ_bb, pred_bb;
  struct loop *loop;
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gcond *cond_expr;
  tree exit_test;
  edge exit_e;

  gcc_assert (entry_edge && initial_value && stride && upper_bound && iv);

  /* Create header, latch and wire up the loop.  */
  pred_bb = entry_edge->src;
  loop_header = split_edge (entry_edge);
  loop_latch = split_edge (single_succ_edge (loop_header));
  succ_bb = single_succ (loop_latch);
  make_edge (loop_header, succ_bb, 0);
  redirect_edge_succ_nodup (single_succ_edge (loop_latch), loop_header);
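
  /* At this point the CFG is pred_bb -> loop_header -> loop_latch ->
     loop_header, with an extra edge loop_header -> succ_bb that becomes the
     loop exit.  */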
  /* Set immediate dominator information.  */
  set_immediate_dominator (CDI_DOMINATORS, loop_header, pred_bb);
  set_immediate_dominator (CDI_DOMINATORS, loop_latch, loop_header);
  set_immediate_dominator (CDI_DOMINATORS, succ_bb, loop_header);

  /* Initialize a loop structure and put it in a loop hierarchy.  */
  loop = alloc_loop ();
  loop->header = loop_header;
  loop->latch = loop_latch;
  add_loop (loop, outer);

  /* TODO: Fix frequencies and counts.  */
  scale_loop_frequencies (loop, profile_probability::even ());

  /* Update dominators.  */
  update_dominators_in_loop (loop);

  /* Modify edge flags.  */
  exit_e = single_exit (loop);
  exit_e->flags = EDGE_LOOP_EXIT | EDGE_FALSE_VALUE;
  single_pred_edge (loop_latch)->flags = EDGE_TRUE_VALUE;

  /* Construct IV code in loop.  */
  initial_value = force_gimple_operand (initial_value, &stmts, true, iv);
  if (stmts)
    {
      gsi_insert_seq_on_edge (loop_preheader_edge (loop), stmts);
      gsi_commit_edge_inserts ();
    }

  upper_bound = force_gimple_operand (upper_bound, &stmts, true, NULL);
  if (stmts)
    {
      gsi_insert_seq_on_edge (loop_preheader_edge (loop), stmts);
      gsi_commit_edge_inserts ();
    }

  gsi = gsi_last_bb (loop_header);
  create_iv (initial_value, stride, iv, loop, &gsi, false,
             iv_before, iv_after);

  /* Insert loop exit condition.  */
  cond_expr = gimple_build_cond
    (LT_EXPR, *iv_before, upper_bound, NULL_TREE, NULL_TREE);

  exit_test = gimple_cond_lhs (cond_expr);
  exit_test = force_gimple_operand_gsi (&gsi, exit_test, true, NULL,
                                        false, GSI_NEW_STMT);
  gimple_cond_set_lhs (cond_expr, exit_test);
  gsi = gsi_last_bb (exit_e->src);
  gsi_insert_after (&gsi, cond_expr, GSI_NEW_STMT);

  split_block_after_labels (loop_header);

  return loop;
}
/* Make area between HEADER_EDGE and LATCH_EDGE a loop by connecting
   latch to header and update loop tree and dominators
   accordingly.  Everything between them plus LATCH_EDGE destination must
   be dominated by HEADER_EDGE destination, and back-reachable from
   LATCH_EDGE source.  HEADER_EDGE is redirected to basic block SWITCH_BB,
   FALSE_EDGE of SWITCH_BB to original destination of HEADER_EDGE and
   TRUE_EDGE of SWITCH_BB to original destination of LATCH_EDGE.
   Returns the newly created loop.  Frequencies and counts in the new loop
   are scaled by FALSE_SCALE and in the old one by TRUE_SCALE.  */

struct loop *
loopify (edge latch_edge, edge header_edge,
         basic_block switch_bb, edge true_edge, edge false_edge,
         bool redirect_all_edges, profile_probability true_scale,
         profile_probability false_scale)
{
  basic_block succ_bb = latch_edge->dest;
  basic_block pred_bb = header_edge->src;
  struct loop *loop = alloc_loop ();
  struct loop *outer = loop_outer (succ_bb->loop_father);
  int freq;
  profile_count cnt;
  edge e;
  edge_iterator ei;

  loop->header = header_edge->dest;
  loop->latch = latch_edge->src;

  freq = EDGE_FREQUENCY (header_edge);
  cnt = header_edge->count;
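
  /* Remember the frequency and count of the header edge before any
     redirection; if all edges are redirected below, SWITCH_BB takes them
     over.  */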
  /* Redirect edges.  */
  loop_redirect_edge (latch_edge, loop->header);
  loop_redirect_edge (true_edge, succ_bb);

  /* During loop versioning, one of the switch_bb edges is already properly
     set.  Do not redirect it again unless redirect_all_edges is true.  */
  if (redirect_all_edges)
    {
      loop_redirect_edge (header_edge, switch_bb);
      loop_redirect_edge (false_edge, loop->header);

      /* Update dominators.  */
      set_immediate_dominator (CDI_DOMINATORS, switch_bb, pred_bb);
      set_immediate_dominator (CDI_DOMINATORS, loop->header, switch_bb);
    }

  set_immediate_dominator (CDI_DOMINATORS, succ_bb, switch_bb);

  /* Compute new loop.  */
  add_loop (loop, outer);

  /* Add switch_bb to appropriate loop.  */
  if (switch_bb->loop_father)
    remove_bb_from_loops (switch_bb);
  add_bb_to_loop (switch_bb, outer);

  /* Fix frequencies.  */
  if (redirect_all_edges)
    {
      switch_bb->frequency = freq;
      switch_bb->count = cnt;
      FOR_EACH_EDGE (e, ei, switch_bb->succs)
        {
          e->count = switch_bb->count.apply_probability (e->probability);
        }
    }
  scale_loop_frequencies (loop, false_scale);
  scale_loop_frequencies (succ_bb->loop_father, true_scale);
  update_dominators_in_loop (loop);

  return loop;
}
/* Remove the latch edge of a LOOP and update loops to indicate that
   the LOOP was removed.  After this function, the original loop latch will
   have no successor, which the caller is expected to fix somehow.

   If this may cause the information about irreducible regions to become
   invalid, IRRED_INVALIDATED is set to true.

   LOOP_CLOSED_SSA_INVALIDATED, if non-NULL, is a bitmap where we store
   basic blocks that had non-trivial update on their loop_father.  */

void
unloop (struct loop *loop, bool *irred_invalidated,
        bitmap loop_closed_ssa_invalidated)
{
  basic_block *body;
  struct loop *ploop;
  unsigned i, n;
  basic_block latch = loop->latch;
  bool dummy = false;

  if (loop_preheader_edge (loop)->flags & EDGE_IRREDUCIBLE_LOOP)
    *irred_invalidated = true;

  /* This is relatively straightforward.  The dominators are unchanged, as
     loop header dominates loop latch, so the only thing we have to care about
     is the placement of loops and basic blocks inside the loop tree.  We
     move them all to the loop->outer, and then let fix_bb_placements do
     its work.  */

  body = get_loop_body (loop);
  n = loop->num_nodes;
  for (i = 0; i < n; i++)
    if (body[i]->loop_father == loop)
      {
        remove_bb_from_loops (body[i]);
        add_bb_to_loop (body[i], loop_outer (loop));
      }
  free (body);

  while (loop->inner)
    {
      ploop = loop->inner;
      flow_loop_tree_node_remove (ploop);
      flow_loop_tree_node_add (loop_outer (loop), ploop);
    }

  /* Remove the loop and free its data.  */
  delete_loop (loop);

  remove_edge (single_succ_edge (latch));

  /* We do not pass IRRED_INVALIDATED to fix_bb_placements here, as even if
     there is an irreducible region inside the cancelled loop, the flags will
     be still correct.  */
  fix_bb_placements (latch, &dummy, loop_closed_ssa_invalidated);
}
/* Fix placement of superloops of LOOP inside loop tree, i.e. ensure that
   condition stated in description of fix_loop_placement holds for them.
   It is used in case when we removed some edges coming out of LOOP, which
   may cause the right placement of LOOP inside loop tree to change.

   IRRED_INVALIDATED is set to true if a change in the loop structures might
   invalidate the information about irreducible regions.  */

static void
fix_loop_placements (struct loop *loop, bool *irred_invalidated)
{
  struct loop *outer;

  while (loop_outer (loop))
    {
      outer = loop_outer (loop);
      if (!fix_loop_placement (loop, irred_invalidated))
        break;

      /* Changing the placement of a loop in the loop tree may alter the
         validity of condition 2) of the description of fix_bb_placement
         for its preheader, because the successor is the header and belongs
         to the loop.  So call fix_bb_placements to fix up the placement
         of the preheader and (possibly) of its predecessors.  */
      fix_bb_placements (loop_preheader_edge (loop)->src,
                         irred_invalidated, NULL);
      loop = outer;
    }
}
/* Duplicate loop bounds and other information we store about
   the loop into its duplicate.  */

void
copy_loop_info (struct loop *loop, struct loop *target)
{
  gcc_checking_assert (!target->any_upper_bound && !target->any_estimate);
  target->any_upper_bound = loop->any_upper_bound;
  target->nb_iterations_upper_bound = loop->nb_iterations_upper_bound;
  target->any_likely_upper_bound = loop->any_likely_upper_bound;
  target->nb_iterations_likely_upper_bound
    = loop->nb_iterations_likely_upper_bound;
  target->any_estimate = loop->any_estimate;
  target->nb_iterations_estimate = loop->nb_iterations_estimate;
  target->estimate_state = loop->estimate_state;
  target->constraints = loop->constraints;
  target->warned_aggressive_loop_optimizations
    |= loop->warned_aggressive_loop_optimizations;
  target->in_oacc_kernels_region = loop->in_oacc_kernels_region;
}
/* Creates a copy of LOOP as a subloop of TARGET loop, placing the newly
   created loop into the loop structure.  */
struct loop *
duplicate_loop (struct loop *loop, struct loop *target)
{
  struct loop *cloop;
  cloop = alloc_loop ();
  place_new_loop (cfun, cloop);

  copy_loop_info (loop, cloop);

  /* Mark the new loop as copy of LOOP.  */
  set_loop_copy (loop, cloop);

  /* Add it to target.  */
  flow_loop_tree_node_add (target, cloop);

  return cloop;
}
/* Copies structure of subloops of LOOP into TARGET loop, placing
   newly created loops into loop tree.  */
void
duplicate_subloops (struct loop *loop, struct loop *target)
{
  struct loop *aloop, *cloop;

  for (aloop = loop->inner; aloop; aloop = aloop->next)
    {
      cloop = duplicate_loop (aloop, target);
      duplicate_subloops (aloop, cloop);
    }
}

/* Copies structure of subloops of N loops, stored in array COPIED_LOOPS,
   into TARGET loop, placing newly created loops into loop tree.  */
static void
copy_loops_to (struct loop **copied_loops, int n, struct loop *target)
{
  struct loop *aloop;
  int i;

  for (i = 0; i < n; i++)
    {
      aloop = duplicate_loop (copied_loops[i], target);
      duplicate_subloops (copied_loops[i], aloop);
    }
}
/* Redirects edge E to basic block DEST.  */
static void
loop_redirect_edge (edge e, basic_block dest)
{
  if (e->dest == dest)
    return;

  redirect_edge_and_branch_force (e, dest);
}

/* Check whether LOOP's body can be duplicated.  */
bool
can_duplicate_loop_p (const struct loop *loop)
{
  int ret;
  basic_block *bbs = get_loop_body (loop);

  ret = can_copy_bbs_p (bbs, loop->num_nodes);
  free (bbs);

  return ret;
}
/* Duplicates body of LOOP to given edge E NDUPL times.  Takes care of updating
   loop structure and dominators.  E's destination must be LOOP header for
   this to work, i.e. it must be entry or latch edge of this loop; these are
   unique, as the loops must have preheaders for this function to work
   correctly (in case E is latch, the function unrolls the loop, if E is entry
   edge, it peels the loop).  Store edges created by copying ORIG edge from
   copies corresponding to set bits in WONT_EXIT bitmap (bit 0 corresponds to
   original LOOP body, the other copies are numbered in order given by control
   flow through them) into TO_REMOVE array.  Returns false if duplication is
   impossible.  */

bool
duplicate_loop_to_header_edge (struct loop *loop, edge e,
                               unsigned int ndupl, sbitmap wont_exit,
                               edge orig, vec<edge> *to_remove,
                               int flags)
{
  struct loop *target, *aloop;
  struct loop **orig_loops;
  unsigned n_orig_loops;
  basic_block header = loop->header, latch = loop->latch;
  basic_block *new_bbs, *bbs, *first_active;
  basic_block new_bb, bb, first_active_latch = NULL;
  edge ae, latch_edge;
  edge spec_edges[2], new_spec_edges[2];
#define SE_LATCH 0
#define SE_ORIG 1
  unsigned i, j, n;
  int is_latch = (latch == e->src);
  int scale_act = 0, *scale_step = NULL, scale_main = 0;
  int scale_after_exit = 0;
  int p, freq_in, freq_le, freq_out_orig;
  int prob_pass_thru, prob_pass_wont_exit, prob_pass_main;
  int add_irreducible_flag;
  basic_block place_after;
  bitmap bbs_to_scale = NULL;
  bitmap_iterator bi;

  gcc_assert (e->dest == loop->header);
  gcc_assert (ndupl > 0);
  if (orig)
    {
      /* Orig must be edge out of the loop.  */
      gcc_assert (flow_bb_inside_loop_p (loop, orig->src));
      gcc_assert (!flow_bb_inside_loop_p (loop, orig->dest));
    }

  n = loop->num_nodes;
  bbs = get_loop_body_in_dom_order (loop);
  gcc_assert (bbs[0] == loop->header);
  gcc_assert (bbs[n - 1] == loop->latch);

  /* Check whether duplication is possible.  */
  if (!can_copy_bbs_p (bbs, loop->num_nodes))
    {
      free (bbs);
      return false;
    }
  new_bbs = XNEWVEC (basic_block, loop->num_nodes);

  /* In case we are doing loop peeling and the loop is in the middle of
     irreducible region, the peeled copies will be inside it too.  */
  add_irreducible_flag = e->flags & EDGE_IRREDUCIBLE_LOOP;
  gcc_assert (!is_latch || !add_irreducible_flag);

  /* Find edge from latch.  */
  latch_edge = loop_latch_edge (loop);

  if (flags & DLTHE_FLAG_UPDATE_FREQ)
    {
      /* Calculate coefficients by which we have to scale frequencies
         of duplicated loop bodies.  */
      freq_in = header->frequency;
      freq_le = EDGE_FREQUENCY (latch_edge);
      if (freq_in == 0)
        freq_in = 1;
      if (freq_in < freq_le)
        freq_in = freq_le;
      freq_out_orig = orig ? EDGE_FREQUENCY (orig) : freq_in - freq_le;
      if (freq_out_orig > freq_in - freq_le)
        freq_out_orig = freq_in - freq_le;
      prob_pass_thru = RDIV (REG_BR_PROB_BASE * freq_le, freq_in);
      prob_pass_wont_exit =
        RDIV (REG_BR_PROB_BASE * (freq_le + freq_out_orig), freq_in);

      if (orig && orig->probability.initialized_p ()
          && !(orig->probability == profile_probability::always ()))
        {
          /* The blocks that are dominated by a removed exit edge ORIG have
             frequencies scaled by this.  */
          if (orig->probability.initialized_p ())
            scale_after_exit
              = GCOV_COMPUTE_SCALE (REG_BR_PROB_BASE,
                                    REG_BR_PROB_BASE
                                    - orig->probability.to_reg_br_prob_base ());
          else
            scale_after_exit = REG_BR_PROB_BASE;
          bbs_to_scale = BITMAP_ALLOC (NULL);
          for (i = 0; i < n; i++)
            {
              if (bbs[i] != orig->src
                  && dominated_by_p (CDI_DOMINATORS, bbs[i], orig->src))
                bitmap_set_bit (bbs_to_scale, i);
            }
        }

      scale_step = XNEWVEC (int, ndupl);

      for (i = 1; i <= ndupl; i++)
        scale_step[i - 1] = bitmap_bit_p (wont_exit, i)
                              ? prob_pass_wont_exit
                              : prob_pass_thru;
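
      /* Each scale_step entry is the probability of falling through from one
         copy of the body into the next one: the latch probability, increased
         by the frequency of the ORIG exit for copies that are known not to
         exit through it (their WONT_EXIT bit is set).  */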
      /* Complete peeling is special as the probability of exit in last
         copy becomes 1.  */
      if (flags & DLTHE_FLAG_COMPLETTE_PEEL)
        {
          int wanted_freq = EDGE_FREQUENCY (e);

          if (wanted_freq > freq_in)
            wanted_freq = freq_in;

          gcc_assert (!is_latch);
          /* First copy has frequency of incoming edge.  Each subsequent
             frequency should be reduced by prob_pass_wont_exit.  The caller
             should've managed the flags so all except for the original loop
             have wont_exit set.  */
          scale_act = GCOV_COMPUTE_SCALE (wanted_freq, freq_in);
          /* Now simulate the duplication adjustments and compute header
             frequency of the last copy.  */
          for (i = 0; i < ndupl; i++)
            wanted_freq = combine_probabilities (wanted_freq, scale_step[i]);
          scale_main = GCOV_COMPUTE_SCALE (wanted_freq, freq_in);
        }
      else if (is_latch)
        {
          prob_pass_main = bitmap_bit_p (wont_exit, 0)
                             ? prob_pass_wont_exit
                             : prob_pass_thru;
          p = prob_pass_main;
          scale_main = REG_BR_PROB_BASE;
          for (i = 0; i < ndupl; i++)
            {
              scale_main += p;
              p = combine_probabilities (p, scale_step[i]);
            }
          scale_main = GCOV_COMPUTE_SCALE (REG_BR_PROB_BASE, scale_main);
          scale_act = combine_probabilities (scale_main, prob_pass_main);
        }
      else
        {
          int preheader_freq = EDGE_FREQUENCY (e);
          scale_main = REG_BR_PROB_BASE;
          for (i = 0; i < ndupl; i++)
            scale_main = combine_probabilities (scale_main, scale_step[i]);
          if (preheader_freq > freq_in)
            preheader_freq = freq_in;
          scale_act = GCOV_COMPUTE_SCALE (preheader_freq, freq_in);
        }

      for (i = 0; i < ndupl; i++)
        gcc_assert (scale_step[i] >= 0 && scale_step[i] <= REG_BR_PROB_BASE);
      gcc_assert (scale_main >= 0 && scale_main <= REG_BR_PROB_BASE
                  && scale_act >= 0 && scale_act <= REG_BR_PROB_BASE);
    }
  /* Loop the new bbs will belong to.  */
  target = e->src->loop_father;

  /* Original loops.  */
  n_orig_loops = 0;
  for (aloop = loop->inner; aloop; aloop = aloop->next)
    n_orig_loops++;
  orig_loops = XNEWVEC (struct loop *, n_orig_loops);
  for (aloop = loop->inner, i = 0; aloop; aloop = aloop->next, i++)
    orig_loops[i] = aloop;

  set_loop_copy (loop, target);

  first_active = XNEWVEC (basic_block, n);
  if (is_latch)
    {
      memcpy (first_active, bbs, n * sizeof (basic_block));
      first_active_latch = latch;
    }

  spec_edges[SE_ORIG] = orig;
  spec_edges[SE_LATCH] = latch_edge;
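
  /* SPEC_EDGES passes the latch edge and the ORIG exit edge to copy_bbs;
     after each duplication NEW_SPEC_EDGES holds the corresponding copies, so
     they can be redirected or recorded in TO_REMOVE below.  */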
  place_after = e->src;
  for (j = 0; j < ndupl; j++)
    {
      /* Copy loops.  */
      copy_loops_to (orig_loops, n_orig_loops, target);

      /* Copy bbs.  */
      copy_bbs (bbs, n, new_bbs, spec_edges, 2, new_spec_edges, loop,
                place_after, true);
      place_after = new_spec_edges[SE_LATCH]->src;

      if (flags & DLTHE_RECORD_COPY_NUMBER)
        for (i = 0; i < n; i++)
          {
            gcc_assert (!new_bbs[i]->aux);
            new_bbs[i]->aux = (void *)(size_t)(j + 1);
          }

      /* Note whether the blocks and edges belong to an irreducible loop.  */
      if (add_irreducible_flag)
        {
          for (i = 0; i < n; i++)
            new_bbs[i]->flags |= BB_DUPLICATED;
          for (i = 0; i < n; i++)
            {
              edge_iterator ei;
              new_bb = new_bbs[i];
              if (new_bb->loop_father == target)
                new_bb->flags |= BB_IRREDUCIBLE_LOOP;

              FOR_EACH_EDGE (ae, ei, new_bb->succs)
                if ((ae->dest->flags & BB_DUPLICATED)
                    && (ae->src->loop_father == target
                        || ae->dest->loop_father == target))
                  ae->flags |= EDGE_IRREDUCIBLE_LOOP;
            }
          for (i = 0; i < n; i++)
            new_bbs[i]->flags &= ~BB_DUPLICATED;
        }

      /* Redirect the special edges.  */
      if (is_latch)
        {
          redirect_edge_and_branch_force (latch_edge, new_bbs[0]);
          redirect_edge_and_branch_force (new_spec_edges[SE_LATCH],
                                          loop->header);
          set_immediate_dominator (CDI_DOMINATORS, new_bbs[0], latch);
          latch = loop->latch = new_bbs[n - 1];
          e = latch_edge = new_spec_edges[SE_LATCH];
        }
      else
        {
          redirect_edge_and_branch_force (new_spec_edges[SE_LATCH],
                                          loop->header);
          redirect_edge_and_branch_force (e, new_bbs[0]);
          set_immediate_dominator (CDI_DOMINATORS, new_bbs[0], e->src);
          e = new_spec_edges[SE_LATCH];
        }

      /* Record exit edge in this copy.  */
      if (orig && bitmap_bit_p (wont_exit, j + 1))
        {
          if (to_remove)
            to_remove->safe_push (new_spec_edges[SE_ORIG]);
          force_edge_cold (new_spec_edges[SE_ORIG], true);

          /* Scale the frequencies of the blocks dominated by the exit.  */
          if (bbs_to_scale)
            {
              EXECUTE_IF_SET_IN_BITMAP (bbs_to_scale, 0, i, bi)
                {
                  scale_bbs_frequencies_int (new_bbs + i, 1, scale_after_exit,
                                             REG_BR_PROB_BASE);
                }
            }
        }

      /* Record the first copy in the control flow order if it is not
         the original loop (i.e. in case of peeling).  */
      if (!first_active_latch)
        {
          memcpy (first_active, new_bbs, n * sizeof (basic_block));
          first_active_latch = new_bbs[n - 1];
        }

      /* Set counts and frequencies.  */
      if (flags & DLTHE_FLAG_UPDATE_FREQ)
        {
          scale_bbs_frequencies_int (new_bbs, n, scale_act, REG_BR_PROB_BASE);
          scale_act = combine_probabilities (scale_act, scale_step[j]);
        }
    }
  free (new_bbs);
  free (orig_loops);
  /* Record the exit edge in the original loop body, and update the
     frequencies.  */
  if (orig && bitmap_bit_p (wont_exit, 0))
    {
      if (to_remove)
        to_remove->safe_push (orig);
      force_edge_cold (orig, true);

      /* Scale the frequencies of the blocks dominated by the exit.  */
      if (bbs_to_scale)
        {
          EXECUTE_IF_SET_IN_BITMAP (bbs_to_scale, 0, i, bi)
            {
              scale_bbs_frequencies_int (bbs + i, 1, scale_after_exit,
                                         REG_BR_PROB_BASE);
            }
        }
    }

  /* Update the original loop.  */
  if (!is_latch)
    set_immediate_dominator (CDI_DOMINATORS, e->dest, e->src);
  if (flags & DLTHE_FLAG_UPDATE_FREQ)
    {
      scale_bbs_frequencies_int (bbs, n, scale_main, REG_BR_PROB_BASE);
      free (scale_step);
    }

  /* Update dominators of outer blocks if affected.  */
  for (i = 0; i < n; i++)
    {
      basic_block dominated, dom_bb;
      vec<basic_block> dom_bbs;
      unsigned j;

      bb = bbs[i];
      bb->aux = 0;

      dom_bbs = get_dominated_by (CDI_DOMINATORS, bb);
      FOR_EACH_VEC_ELT (dom_bbs, j, dominated)
        {
          if (flow_bb_inside_loop_p (loop, dominated))
            continue;
          dom_bb = nearest_common_dominator (
                     CDI_DOMINATORS, first_active[i], first_active_latch);
          set_immediate_dominator (CDI_DOMINATORS, dominated, dom_bb);
        }
      dom_bbs.release ();
    }
  free (first_active);

  free (bbs);
  BITMAP_FREE (bbs_to_scale);

  return true;
}
/* A callback for make_forwarder block, to redirect all edges except for
   MFB_KJ_EDGE to the entry part.  E is the edge for which we should decide
   whether to redirect it.  */

edge mfb_kj_edge;
bool
mfb_keep_just (edge e)
{
  return e != mfb_kj_edge;
}

/* True when a candidate preheader BLOCK has predecessors from LOOP.  */

static bool
has_preds_from_loop (basic_block block, struct loop *loop)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, block->preds)
    if (e->src->loop_father == loop)
      return true;
  return false;
}
/* Creates a pre-header for a LOOP.  Returns newly created block.  Unless
   CP_SIMPLE_PREHEADERS is set in FLAGS, we only force LOOP to have single
   entry; otherwise we also force preheader block to have only one successor.
   When CP_FALLTHRU_PREHEADERS is set in FLAGS, we force the preheader block
   to be a fallthru predecessor to the loop header and to have only
   predecessors from outside of the loop.
   The function also updates dominators.  */

basic_block
create_preheader (struct loop *loop, int flags)
{
  edge e;
  basic_block dummy;
  int nentry = 0;
  bool irred = false;
  bool latch_edge_was_fallthru;
  edge one_succ_pred = NULL, single_entry = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, loop->header->preds)
    {
      if (e->src == loop->latch)
        continue;
      irred |= (e->flags & EDGE_IRREDUCIBLE_LOOP) != 0;
      nentry++;
      single_entry = e;
      if (single_succ_p (e->src))
        one_succ_pred = e;
    }
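
  /* After the scan NENTRY is the number of entry edges, SINGLE_ENTRY is the
     last entry edge seen (hence the only one when NENTRY == 1), and
     ONE_SUCC_PRED is an entry edge whose source has a single successor, used
     below when re-placing the preheader to preserve fallthru edges.  */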
  gcc_assert (nentry);
  if (nentry == 1)
    {
      bool need_forwarder_block = false;

      /* We do not allow entry block to be the loop preheader, since we
         cannot emit code there.  */
      if (single_entry->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        need_forwarder_block = true;
      else
        {
          /* If we want simple preheaders, also force the preheader to have
             just a single successor.  */
          if ((flags & CP_SIMPLE_PREHEADERS)
              && !single_succ_p (single_entry->src))
            need_forwarder_block = true;
          /* If we want fallthru preheaders, also create forwarder block when
             preheader ends with a jump or has predecessors from loop.  */
          else if ((flags & CP_FALLTHRU_PREHEADERS)
                   && (JUMP_P (BB_END (single_entry->src))
                       || has_preds_from_loop (single_entry->src, loop)))
            need_forwarder_block = true;
        }
      if (! need_forwarder_block)
        return NULL;
    }

  mfb_kj_edge = loop_latch_edge (loop);
  latch_edge_was_fallthru = (mfb_kj_edge->flags & EDGE_FALLTHRU) != 0;
  if (nentry == 1)
    dummy = split_edge (single_entry);
  else
    {
      edge fallthru = make_forwarder_block (loop->header, mfb_keep_just, NULL);
      dummy = fallthru->src;
      loop->header = fallthru->dest;
    }

  /* Try to be clever in placing the newly created preheader.  The idea is to
     avoid breaking any "fallthruness" relationship between blocks.

     The preheader was created just before the header and all incoming edges
     to the header were redirected to the preheader, except the latch edge.
     So the only problematic case is when this latch edge was a fallthru
     edge: it is not anymore after the preheader creation so we have broken
     the fallthruness.  We're therefore going to look for a better place.  */
  if (latch_edge_was_fallthru)
    {
      if (one_succ_pred)
        e = one_succ_pred;
      else
        e = EDGE_PRED (dummy, 0);

      move_block_after (dummy, e->src);
    }

  if (irred)
    {
      dummy->flags |= BB_IRREDUCIBLE_LOOP;
      single_succ_edge (dummy)->flags |= EDGE_IRREDUCIBLE_LOOP;
    }

  if (dump_file)
    fprintf (dump_file, "Created preheader block for loop %i\n",
             loop->num);

  if (flags & CP_FALLTHRU_PREHEADERS)
    gcc_assert ((single_succ_edge (dummy)->flags & EDGE_FALLTHRU)
                && !JUMP_P (BB_END (dummy)));

  return dummy;
}
/* Create preheaders for each loop; for meaning of FLAGS see
   create_preheader.  */

void
create_preheaders (int flags)
{
  struct loop *loop;

  if (!current_loops)
    return;

  FOR_EACH_LOOP (loop, 0)
    create_preheader (loop, flags);
  loops_state_set (LOOPS_HAVE_PREHEADERS);
}

/* Forces all loop latches to have only single successor.  */

void
force_single_succ_latches (void)
{
  struct loop *loop;
  edge e;

  FOR_EACH_LOOP (loop, 0)
    {
      if (loop->latch != loop->header && single_succ_p (loop->latch))
        continue;

      e = find_edge (loop->latch, loop->header);
      gcc_checking_assert (e != NULL);

      split_edge (e);
    }
  loops_state_set (LOOPS_HAVE_SIMPLE_LATCHES);
}
/* This function is called from loop_version.  It splits the entry edge
   of the loop we want to version, adds the versioning condition, and
   adjusts the edges to the two versions of the loop appropriately.
   E is an incoming edge.  Returns the basic block containing the
   condition.

   --- edge e ---- > [second_head]

   Split it and insert new conditional expression and adjust edges.

    --- edge e ---> [cond expr] ---> [first_head]
                         |
                         +---------> [second_head]

   THEN_PROB is the probability of then branch of the condition.
   ELSE_PROB is the probability of else branch.  Note that they may be both
   REG_BR_PROB_BASE when condition is IFN_LOOP_VECTORIZED or
   IFN_LOOP_DIST_ALIAS.  */

static basic_block
lv_adjust_loop_entry_edge (basic_block first_head, basic_block second_head,
                           edge e, void *cond_expr,
                           profile_probability then_prob,
                           profile_probability else_prob)
{
  basic_block new_head = NULL;
  edge e1;

  gcc_assert (e->dest == second_head);

  /* Split edge 'e'.  This will create a new basic block, where we can
     insert conditional expr.  */
  new_head = split_edge (e);

  lv_add_condition_to_bb (first_head, second_head, new_head,
                          cond_expr);

  /* Don't set EDGE_TRUE_VALUE in RTL mode, as it's invalid there.  */
  e = single_succ_edge (new_head);
  e1 = make_edge (new_head, first_head,
                  current_ir_type () == IR_GIMPLE ? EDGE_TRUE_VALUE : 0);
  e1->probability = then_prob;
  e->probability = else_prob;
  e1->count = e->count.apply_probability (e1->probability);
  e->count = e->count.apply_probability (e->probability);

  set_immediate_dominator (CDI_DOMINATORS, first_head, new_head);
  set_immediate_dominator (CDI_DOMINATORS, second_head, new_head);

  /* Adjust loop header phi nodes.  */
  lv_adjust_loop_header_phi (first_head, second_head, new_head, e1);

  return new_head;
}
/* Main entry point for Loop Versioning transformation.

   This transformation, given a condition and a loop, creates
   -if (condition) { loop_copy1 } else { loop_copy2 },
   where loop_copy1 is the loop transformed in one way, and loop_copy2
   is the loop transformed in another way (or unchanged).  COND_EXPR
   may be a run time test for things that were not resolved by static
   analysis (overlapping ranges (anti-aliasing), alignment, etc.).

   If non-NULL, CONDITION_BB is set to the basic block containing the
   condition.

   THEN_PROB is the probability of the then edge of the if.  THEN_SCALE
   is the ratio by which the frequencies in the original loop should
   be scaled.  ELSE_SCALE is the ratio by which the frequencies in the
   new loop should be scaled.

   If PLACE_AFTER is true, we place the new loop after LOOP in the
   instruction stream, otherwise it is placed before LOOP.  */

struct loop *
loop_version (struct loop *loop,
              void *cond_expr, basic_block *condition_bb,
              profile_probability then_prob, profile_probability else_prob,
              profile_probability then_scale, profile_probability else_scale,
              bool place_after)
{
  basic_block first_head, second_head;
  edge entry, latch_edge, true_edge, false_edge;
  int irred_flag;
  struct loop *nloop;
  basic_block cond_bb;

  /* Record entry and latch edges for the loop.  */
  entry = loop_preheader_edge (loop);
  irred_flag = entry->flags & EDGE_IRREDUCIBLE_LOOP;
  entry->flags &= ~EDGE_IRREDUCIBLE_LOOP;

  /* Note down head of loop as first_head.  */
  first_head = entry->dest;

  /* Duplicate loop.  */
  if (!cfg_hook_duplicate_loop_to_header_edge (loop, entry, 1,
                                               NULL, NULL, NULL, 0))
    {
      entry->flags |= irred_flag;
      return NULL;
    }

  /* After duplication entry edge now points to new loop head block.
     Note down new head as second_head.  */
  second_head = entry->dest;

  /* Split loop entry edge and insert new block with cond expr.  */
  cond_bb = lv_adjust_loop_entry_edge (first_head, second_head,
                                       entry, cond_expr, then_prob, else_prob);
  if (condition_bb)
    *condition_bb = cond_bb;

  if (!cond_bb)
    {
      entry->flags |= irred_flag;
      return NULL;
    }

  latch_edge = single_succ_edge (get_bb_copy (loop->latch));

  extract_cond_bb_edges (cond_bb, &true_edge, &false_edge);
  nloop = loopify (latch_edge,
                   single_pred_edge (get_bb_copy (loop->header)),
                   cond_bb, true_edge, false_edge,
                   false /* Do not redirect all edges.  */,
                   then_scale, else_scale);
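
  /* NLOOP is the newly created copy: it is reached when the versioning
     condition is false and is scaled by ELSE_SCALE, while the original LOOP
     stays on the true edge and is scaled by THEN_SCALE.  */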
  copy_loop_info (loop, nloop);

  /* loopify redirected latch_edge.  Update its PENDING_STMTS.  */
  lv_flush_pending_stmts (latch_edge);

  /* loopify redirected condition_bb's succ edge.  Update its PENDING_STMTS.  */
  extract_cond_bb_edges (cond_bb, &true_edge, &false_edge);
  lv_flush_pending_stmts (false_edge);

  /* Adjust irreducible flag.  */
  if (irred_flag)
    {
      cond_bb->flags |= BB_IRREDUCIBLE_LOOP;
      loop_preheader_edge (loop)->flags |= EDGE_IRREDUCIBLE_LOOP;
      loop_preheader_edge (nloop)->flags |= EDGE_IRREDUCIBLE_LOOP;
      single_pred_edge (cond_bb)->flags |= EDGE_IRREDUCIBLE_LOOP;
    }

  if (place_after)
    {
      basic_block *bbs = get_loop_body_in_dom_order (nloop), after;
      unsigned i;

      after = loop->latch;

      for (i = 0; i < nloop->num_nodes; i++)
        {
          move_block_after (bbs[i], after);
          after = bbs[i];
        }

      free (bbs);
    }

  /* At this point condition_bb is loop preheader with two successors,
     first_head and second_head.  Make sure that loop preheader has only
     one successor.  */
  split_edge (loop_preheader_edge (loop));
  split_edge (loop_preheader_edge (nloop));

  return nloop;
}