gcc/cfgloop.c (official-gcc.git)
1 /* Natural loop discovery code for GNU compiler.
2 Copyright (C) 2000-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "cfghooks.h"
28 #include "gimple-ssa.h"
29 #include "diagnostic-core.h"
30 #include "cfganal.h"
31 #include "cfgloop.h"
32 #include "gimple-iterator.h"
33 #include "dumpfile.h"
35 static void flow_loops_cfg_dump (FILE *);
37 /* Dump loop related CFG information. */
39 static void
40 flow_loops_cfg_dump (FILE *file)
42 basic_block bb;
44 if (!file)
45 return;
47 FOR_EACH_BB_FN (bb, cfun)
49 edge succ;
50 edge_iterator ei;
52 fprintf (file, ";; %d succs { ", bb->index);
53 FOR_EACH_EDGE (succ, ei, bb->succs)
54 fprintf (file, "%d ", succ->dest->index);
55 fprintf (file, "}\n");
59 /* Return nonzero if the nodes of LOOP are a subset of OUTER. */
61 bool
62 flow_loop_nested_p (const struct loop *outer, const struct loop *loop)
64 unsigned odepth = loop_depth (outer);
66 return (loop_depth (loop) > odepth
67 && (*loop->superloops)[odepth] == outer);
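/* Editor's note: an illustrative standalone sketch (not part of cfgloop.c)
   of the invariant the test above relies on -- every loop records the chain
   of its superloops, outermost first, so the nesting test is a single
   indexed lookup.  All toy_* names are hypothetical.  */

#include <cstddef>
#include <vector>

struct toy_loop
{
  /* Superloop chain, outermost (the loop tree root) first.  Its length is
     the depth of the loop.  */
  std::vector<toy_loop *> superloops;
};

static bool
toy_loop_nested_p (const toy_loop *outer, const toy_loop *loop)
{
  size_t odepth = outer->superloops.size ();

  /* LOOP is inside OUTER iff LOOP is deeper and OUTER sits at its own
     depth in LOOP's superloop chain.  */
  return (loop->superloops.size () > odepth
          && loop->superloops[odepth] == outer);
}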
70 /* Returns the superloop of LOOP at depth DEPTH (indexed from zero,
71 where depth zero is the root of the loop tree). */
73 struct loop *
74 superloop_at_depth (struct loop *loop, unsigned depth)
76 unsigned ldepth = loop_depth (loop);
78 gcc_assert (depth <= ldepth);
80 if (depth == ldepth)
81 return loop;
83 return (*loop->superloops)[depth];
86 /* Returns the list of the latch edges of LOOP. */
88 static vec<edge>
89 get_loop_latch_edges (const struct loop *loop)
91 edge_iterator ei;
92 edge e;
93 vec<edge> ret = vNULL;
95 FOR_EACH_EDGE (e, ei, loop->header->preds)
97 if (dominated_by_p (CDI_DOMINATORS, e->src, loop->header))
98 ret.safe_push (e);
101 return ret;
104 /* Dump the loop information specified by LOOP to the stream FILE
105 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
107 void
108 flow_loop_dump (const struct loop *loop, FILE *file,
109 void (*loop_dump_aux) (const struct loop *, FILE *, int),
110 int verbose)
112 basic_block *bbs;
113 unsigned i;
114 vec<edge> latches;
115 edge e;
117 if (! loop || ! loop->header)
118 return;
120 fprintf (file, ";;\n;; Loop %d\n", loop->num);
122 fprintf (file, ";; header %d, ", loop->header->index);
123 if (loop->latch)
124 fprintf (file, "latch %d\n", loop->latch->index);
125 else
127 fprintf (file, "multiple latches:");
128 latches = get_loop_latch_edges (loop);
129 FOR_EACH_VEC_ELT (latches, i, e)
130 fprintf (file, " %d", e->src->index);
131 latches.release ();
132 fprintf (file, "\n");
135 fprintf (file, ";; depth %d, outer %ld\n",
136 loop_depth (loop), (long) (loop_outer (loop)
137 ? loop_outer (loop)->num : -1));
139 if (loop->latch)
141 bool read_profile_p;
142 gcov_type nit = expected_loop_iterations_unbounded (loop, &read_profile_p);
143 if (read_profile_p && !loop->any_estimate)
144 fprintf (file, ";; profile-based iteration count: %" PRIu64 "\n",
145 (uint64_t) nit);
148 fprintf (file, ";; nodes:");
149 bbs = get_loop_body (loop);
150 for (i = 0; i < loop->num_nodes; i++)
151 fprintf (file, " %d", bbs[i]->index);
152 free (bbs);
153 fprintf (file, "\n");
155 if (loop_dump_aux)
156 loop_dump_aux (loop, file, verbose);
159 /* Dump information about all loops to the stream FILE,
160 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
162 void
163 flow_loops_dump (FILE *file, void (*loop_dump_aux) (const struct loop *, FILE *, int), int verbose)
165 struct loop *loop;
167 if (!current_loops || ! file)
168 return;
170 fprintf (file, ";; %d loops found\n", number_of_loops (cfun));
172 FOR_EACH_LOOP (loop, LI_INCLUDE_ROOT)
174 flow_loop_dump (loop, file, loop_dump_aux, verbose);
177 if (verbose)
178 flow_loops_cfg_dump (file);
181 /* Free data allocated for LOOP. */
183 void
184 flow_loop_free (struct loop *loop)
186 struct loop_exit *exit, *next;
188 vec_free (loop->superloops);
190 /* Break the list of the loop exit records. They will be freed when the
191 corresponding edge is rescanned or removed, and this avoids
192 accessing the (already released) head of the list stored in the
193 loop structure. */
194 for (exit = loop->exits->next; exit != loop->exits; exit = next)
196 next = exit->next;
197 exit->next = exit;
198 exit->prev = exit;
201 ggc_free (loop->exits);
202 ggc_free (loop);
205 /* Free all the memory allocated for LOOPS. */
207 void
208 flow_loops_free (struct loops *loops)
210 if (loops->larray)
212 unsigned i;
213 loop_p loop;
215 /* Free the loop descriptors. */
216 FOR_EACH_VEC_SAFE_ELT (loops->larray, i, loop)
218 if (!loop)
219 continue;
221 flow_loop_free (loop);
224 vec_free (loops->larray);
228 /* Find the nodes contained within the LOOP with header HEADER.
229 Return the number of nodes within the loop. */
231 int
232 flow_loop_nodes_find (basic_block header, struct loop *loop)
234 vec<basic_block> stack = vNULL;
235 int num_nodes = 1;
236 edge latch;
237 edge_iterator latch_ei;
239 header->loop_father = loop;
241 FOR_EACH_EDGE (latch, latch_ei, loop->header->preds)
243 if (latch->src->loop_father == loop
244 || !dominated_by_p (CDI_DOMINATORS, latch->src, loop->header))
245 continue;
247 num_nodes++;
248 stack.safe_push (latch->src);
249 latch->src->loop_father = loop;
251 while (!stack.is_empty ())
253 basic_block node;
254 edge e;
255 edge_iterator ei;
257 node = stack.pop ();
259 FOR_EACH_EDGE (e, ei, node->preds)
261 basic_block ancestor = e->src;
263 if (ancestor->loop_father != loop)
265 ancestor->loop_father = loop;
266 num_nodes++;
267 stack.safe_push (ancestor);
272 stack.release ();
274 return num_nodes;
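/* Editor's note: an illustrative standalone sketch (not part of cfgloop.c)
   of the same idea -- the loop body is everything that reaches a latch edge
   backwards without leaving through the header.  A callback stands in for
   dominated_by_p; all toy_* names are hypothetical.  */

#include <cstddef>
#include <unordered_set>
#include <vector>

struct toy_bb
{
  std::vector<toy_bb *> preds;
};

static size_t
toy_loop_nodes_find (toy_bb *header,
                     bool (*dominated_by) (toy_bb *bb, toy_bb *dom),
                     std::unordered_set<toy_bb *> &body)
{
  std::vector<toy_bb *> stack;

  /* The header is in the loop and also acts as the barrier that stops the
     backward walk.  */
  body.insert (header);

  /* Seed the walk with the sources of the latch edges, i.e. predecessors
     of the header that the header dominates.  */
  for (toy_bb *pred : header->preds)
    if (dominated_by (pred, header) && body.insert (pred).second)
      stack.push_back (pred);

  /* Walk predecessors until no new blocks are found.  */
  while (!stack.empty ())
    {
      toy_bb *bb = stack.back ();
      stack.pop_back ();
      for (toy_bb *pred : bb->preds)
        if (body.insert (pred).second)
          stack.push_back (pred);
    }

  return body.size ();
}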
277 /* Records the vector of superloops of the loop LOOP, whose immediate
278 superloop is FATHER. */
280 static void
281 establish_preds (struct loop *loop, struct loop *father)
283 loop_p ploop;
284 unsigned depth = loop_depth (father) + 1;
285 unsigned i;
287 loop->superloops = 0;
288 vec_alloc (loop->superloops, depth);
289 FOR_EACH_VEC_SAFE_ELT (father->superloops, i, ploop)
290 loop->superloops->quick_push (ploop);
291 loop->superloops->quick_push (father);
293 for (ploop = loop->inner; ploop; ploop = ploop->next)
294 establish_preds (ploop, loop);
297 /* Add LOOP to the loop hierarchy tree, where FATHER is the father of the
298 added loop. If LOOP has children, make sure their superloop (pred)
299 information is initialized correctly as well. */
301 void
302 flow_loop_tree_node_add (struct loop *father, struct loop *loop)
304 loop->next = father->inner;
305 father->inner = loop;
307 establish_preds (loop, father);
310 /* Remove LOOP from the loop hierarchy tree. */
312 void
313 flow_loop_tree_node_remove (struct loop *loop)
315 struct loop *prev, *father;
317 father = loop_outer (loop);
319 /* Remove loop from the list of sons. */
320 if (father->inner == loop)
321 father->inner = loop->next;
322 else
324 for (prev = father->inner; prev->next != loop; prev = prev->next)
325 continue;
326 prev->next = loop->next;
329 loop->superloops = NULL;
332 /* Allocates and returns new loop structure. */
334 struct loop *
335 alloc_loop (void)
337 struct loop *loop = ggc_cleared_alloc<struct loop> ();
339 loop->exits = ggc_cleared_alloc<loop_exit> ();
340 loop->exits->next = loop->exits->prev = loop->exits;
341 loop->can_be_parallel = false;
342 loop->constraints = 0;
343 loop->nb_iterations_upper_bound = 0;
344 loop->nb_iterations_likely_upper_bound = 0;
345 loop->nb_iterations_estimate = 0;
346 return loop;
349 /* Initializes loops structure LOOPS, reserving place for NUM_LOOPS loops
350 (including the root of the loop tree). */
352 void
353 init_loops_structure (struct function *fn,
354 struct loops *loops, unsigned num_loops)
356 struct loop *root;
358 memset (loops, 0, sizeof *loops);
359 vec_alloc (loops->larray, num_loops);
361 /* Dummy loop containing whole function. */
362 root = alloc_loop ();
363 root->num_nodes = n_basic_blocks_for_fn (fn);
364 root->latch = EXIT_BLOCK_PTR_FOR_FN (fn);
365 root->header = ENTRY_BLOCK_PTR_FOR_FN (fn);
366 ENTRY_BLOCK_PTR_FOR_FN (fn)->loop_father = root;
367 EXIT_BLOCK_PTR_FOR_FN (fn)->loop_father = root;
369 loops->larray->quick_push (root);
370 loops->tree_root = root;
373 /* Returns whether HEADER is a loop header. */
375 bool
376 bb_loop_header_p (basic_block header)
378 edge_iterator ei;
379 edge e;
381 /* If we have an abnormal predecessor, do not consider the
382 loop (not worth the problems). */
383 if (bb_has_abnormal_pred (header))
384 return false;
386 /* Look for back edges where a predecessor is dominated
387 by this block. A natural loop has a single entry
388 node (header) that dominates all the nodes in the
389 loop. It also has single back edge to the header
390 from a latch node. */
391 FOR_EACH_EDGE (e, ei, header->preds)
393 basic_block latch = e->src;
394 if (latch != ENTRY_BLOCK_PTR_FOR_FN (cfun)
395 && dominated_by_p (CDI_DOMINATORS, latch, header))
396 return true;
399 return false;
402 /* Find all the natural loops in the function, record them in the LOOPS
403 structure, and recalculate the loop_father information in the basic block
404 structures. If LOOPS is non-NULL, the loop structures for already
405 recorded loops will be reused and their number will not change. We assume
406 that no stale loops exist in LOOPS.
407 When LOOPS is NULL, it is allocated and rebuilt from scratch.
408 Return the built LOOPS structure. */
410 struct loops *
411 flow_loops_find (struct loops *loops)
413 bool from_scratch = (loops == NULL);
414 int *rc_order;
415 int b;
416 unsigned i;
418 /* Ensure that the dominators are computed. */
419 calculate_dominance_info (CDI_DOMINATORS);
421 if (!loops)
423 loops = ggc_cleared_alloc<struct loops> ();
424 init_loops_structure (cfun, loops, 1);
427 /* Ensure that loop exits were released. */
428 gcc_assert (loops->exits == NULL);
430 /* Taking care of this degenerate case makes the rest of
431 this code simpler. */
432 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
433 return loops;
435 /* The root loop node contains all basic-blocks. */
436 loops->tree_root->num_nodes = n_basic_blocks_for_fn (cfun);
438 /* Compute depth first search order of the CFG so that outer
439 natural loops will be found before inner natural loops. */
440 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
441 pre_and_rev_post_order_compute (NULL, rc_order, false);
443 /* Gather all loop headers in reverse completion order and allocate
444 loop structures for loops that are not already present. */
445 auto_vec<loop_p> larray (loops->larray->length ());
446 for (b = 0; b < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; b++)
448 basic_block header = BASIC_BLOCK_FOR_FN (cfun, rc_order[b]);
449 if (bb_loop_header_p (header))
451 struct loop *loop;
453 /* The current active loop tree has valid loop-fathers for
454 header blocks. */
455 if (!from_scratch
456 && header->loop_father->header == header)
458 loop = header->loop_father;
459 /* If we found an existing loop remove it from the
460 loop tree. It is going to be inserted again
461 below. */
462 flow_loop_tree_node_remove (loop);
464 else
466 /* Otherwise allocate a new loop structure for the loop. */
467 loop = alloc_loop ();
468 /* ??? We could re-use unused loop slots here. */
469 loop->num = loops->larray->length ();
470 vec_safe_push (loops->larray, loop);
471 loop->header = header;
473 if (!from_scratch
474 && dump_file && (dump_flags & TDF_DETAILS))
475 fprintf (dump_file, "flow_loops_find: discovered new "
476 "loop %d with header %d\n",
477 loop->num, header->index);
479 /* Reset latch, we recompute it below. */
480 loop->latch = NULL;
481 larray.safe_push (loop);
484 /* Make blocks part of the loop root node at start. */
485 header->loop_father = loops->tree_root;
488 free (rc_order);
490 /* Now iterate over the loops found, insert them into the loop tree
491 and assign basic-block ownership. */
492 for (i = 0; i < larray.length (); ++i)
494 struct loop *loop = larray[i];
495 basic_block header = loop->header;
496 edge_iterator ei;
497 edge e;
499 flow_loop_tree_node_add (header->loop_father, loop);
500 loop->num_nodes = flow_loop_nodes_find (loop->header, loop);
502 /* Look for the latch for this header block, if it has just a
503 single one. */
504 FOR_EACH_EDGE (e, ei, header->preds)
506 basic_block latch = e->src;
508 if (flow_bb_inside_loop_p (loop, latch))
510 if (loop->latch != NULL)
512 /* More than one latch edge. */
513 loop->latch = NULL;
514 break;
516 loop->latch = latch;
521 return loops;
524 /* Ratio of edge frequencies above which one of several latch edges is
525 considered to belong to an inner loop with the same header. */
526 #define HEAVY_EDGE_RATIO 8
528 /* Minimum number of profile samples required before the
529 find_subloop_latch_edge_by_profile heuristic is applied. */
530 #define HEAVY_EDGE_MIN_SAMPLES 10
532 /* If profile information is available, finds an edge in LATCHES that is much
533 more frequent than the remaining edges. Returns such an edge, or NULL if
534 we do not find one.
536 We do not use guessed profile here, only the measured one. The guessed
537 profile is usually too flat and unreliable for this (and it is mostly based
538 on the loop structure of the program, so it does not make much sense to
539 derive the loop structure from it). */
541 static edge
542 find_subloop_latch_edge_by_profile (vec<edge> latches)
544 unsigned i;
545 edge e, me = NULL;
546 profile_count mcount = profile_count::zero (), tcount = profile_count::zero ();
548 FOR_EACH_VEC_ELT (latches, i, e)
550 if (e->count > mcount)
552 me = e;
553 mcount = e->count;
555 tcount += e->count;
558 if (!tcount.initialized_p () || tcount < HEAVY_EDGE_MIN_SAMPLES
559 || (tcount - mcount).apply_scale (HEAVY_EDGE_RATIO, 1) > tcount)
560 return NULL;
562 if (dump_file)
563 fprintf (dump_file,
564 "Found latch edge %d -> %d using profile information.\n",
565 me->src->index, me->dest->index);
566 return me;
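/* Editor's note: an illustrative restatement (not part of cfgloop.c) of the
   acceptance test above on plain integers.  The heaviest latch edge wins
   only when the remaining latch edges together carry at most
   1/HEAVY_EDGE_RATIO of the total count and the total reaches
   HEAVY_EDGE_MIN_SAMPLES.  The toy_* name is hypothetical.  */

#include <cstdint>

static bool
toy_heavy_edge_p (uint64_t max_count, uint64_t total_count)
{
  const uint64_t ratio = 8;        /* HEAVY_EDGE_RATIO */
  const uint64_t min_samples = 10; /* HEAVY_EDGE_MIN_SAMPLES */

  return (total_count >= min_samples
          && (total_count - max_count) * ratio <= total_count);
}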
569 /* Among LATCHES, guesses a latch edge of LOOP corresponding to subloop, based
570 on the structure of induction variables. Returns this edge, or NULL if we
571 do not find any.
573 We are quite conservative, and look just for an obvious simple innermost
574 loop (which is the case where we would lose the most performance by not
575 disambiguating the loop). More precisely, we look for the following
576 situation: The source of the chosen latch edge dominates sources of all
577 the other latch edges. Additionally, the header does not contain a phi node
578 such that the argument from the chosen edge is equal to the argument from
579 another edge. */
581 static edge
582 find_subloop_latch_edge_by_ivs (struct loop *loop ATTRIBUTE_UNUSED, vec<edge> latches)
584 edge e, latch = latches[0];
585 unsigned i;
586 gphi *phi;
587 gphi_iterator psi;
588 tree lop;
589 basic_block bb;
591 /* Find the candidate for the latch edge. */
592 for (i = 1; latches.iterate (i, &e); i++)
593 if (dominated_by_p (CDI_DOMINATORS, latch->src, e->src))
594 latch = e;
596 /* Verify that it dominates all the latch edges. */
597 FOR_EACH_VEC_ELT (latches, i, e)
598 if (!dominated_by_p (CDI_DOMINATORS, e->src, latch->src))
599 return NULL;
601 /* Check for a phi node that would deny that this is a latch edge of
602 a subloop. */
603 for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
605 phi = psi.phi ();
606 lop = PHI_ARG_DEF_FROM_EDGE (phi, latch);
608 /* Ignore the values that are not changed inside the subloop. */
609 if (TREE_CODE (lop) != SSA_NAME
610 || SSA_NAME_DEF_STMT (lop) == phi)
611 continue;
612 bb = gimple_bb (SSA_NAME_DEF_STMT (lop));
613 if (!bb || !flow_bb_inside_loop_p (loop, bb))
614 continue;
616 FOR_EACH_VEC_ELT (latches, i, e)
617 if (e != latch
618 && PHI_ARG_DEF_FROM_EDGE (phi, e) == lop)
619 return NULL;
622 if (dump_file)
623 fprintf (dump_file,
624 "Found latch edge %d -> %d using iv structure.\n",
625 latch->src->index, latch->dest->index);
626 return latch;
629 /* If we can determine that one of the several latch edges of LOOP behaves
630 as a latch edge of a separate subloop, returns this edge. Otherwise
631 returns NULL. */
633 static edge
634 find_subloop_latch_edge (struct loop *loop)
636 vec<edge> latches = get_loop_latch_edges (loop);
637 edge latch = NULL;
639 if (latches.length () > 1)
641 latch = find_subloop_latch_edge_by_profile (latches);
643 if (!latch
644 /* We consider ivs to guess the latch edge only in SSA. Perhaps we
645 should use cfghook for this, but it is hard to imagine it would
646 be useful elsewhere. */
647 && current_ir_type () == IR_GIMPLE)
648 latch = find_subloop_latch_edge_by_ivs (loop, latches);
651 latches.release ();
652 return latch;
655 /* Callback for make_forwarder_block. Returns true if the edge E is marked
656 in the set MFB_REIS_SET. */
658 static hash_set<edge> *mfb_reis_set;
659 static bool
660 mfb_redirect_edges_in_set (edge e)
662 return mfb_reis_set->contains (e);
665 /* Creates a subloop of LOOP with latch edge LATCH. */
667 static void
668 form_subloop (struct loop *loop, edge latch)
670 edge_iterator ei;
671 edge e, new_entry;
672 struct loop *new_loop;
674 mfb_reis_set = new hash_set<edge>;
675 FOR_EACH_EDGE (e, ei, loop->header->preds)
677 if (e != latch)
678 mfb_reis_set->add (e);
680 new_entry = make_forwarder_block (loop->header, mfb_redirect_edges_in_set,
681 NULL);
682 delete mfb_reis_set;
684 loop->header = new_entry->src;
686 /* Find the blocks and subloops that belong to the new loop, and add it to
687 the appropriate place in the loop tree. */
688 new_loop = alloc_loop ();
689 new_loop->header = new_entry->dest;
690 new_loop->latch = latch->src;
691 add_loop (new_loop, loop);
694 /* Make all the latch edges of LOOP go to a single forwarder block --
695 the new latch of LOOP. */
697 static void
698 merge_latch_edges (struct loop *loop)
700 vec<edge> latches = get_loop_latch_edges (loop);
701 edge latch, e;
702 unsigned i;
704 gcc_assert (latches.length () > 0);
706 if (latches.length () == 1)
707 loop->latch = latches[0]->src;
708 else
710 if (dump_file)
711 fprintf (dump_file, "Merged latch edges of loop %d\n", loop->num);
713 mfb_reis_set = new hash_set<edge>;
714 FOR_EACH_VEC_ELT (latches, i, e)
715 mfb_reis_set->add (e);
716 latch = make_forwarder_block (loop->header, mfb_redirect_edges_in_set,
717 NULL);
718 delete mfb_reis_set;
720 loop->header = latch->dest;
721 loop->latch = latch->src;
724 latches.release ();
727 /* LOOP may have several latch edges. Transform it into (possibly several)
728 loops with single latch edge. */
730 static void
731 disambiguate_multiple_latches (struct loop *loop)
733 edge e;
735 /* We eliminate the multiple latches by splitting the header to the forwarder
736 block F and the rest R, and redirecting the edges. There are two cases:
738 1) If there is a latch edge E that corresponds to a subloop (we guess
739 that based on profile -- if it is taken much more often than the
740 remaining edges; and on trees, using the information about induction
741 variables of the loops), we redirect E to R, all the remaining edges to
742 F, then rescan the loops and try again for the outer loop.
743 2) If there is no such edge, we redirect all latch edges to F, and the
744 entry edges to R, thus making F the single latch of the loop. */
746 if (dump_file)
747 fprintf (dump_file, "Disambiguating loop %d with multiple latches\n",
748 loop->num);
750 /* During latch merging, we may need to redirect the entry edges to a new
751 block. This would cause problems if the entry edge was the one from the
752 entry block. To avoid having to handle this case specially, split
753 such entry edge. */
754 e = find_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), loop->header);
755 if (e)
756 split_edge (e);
758 while (1)
760 e = find_subloop_latch_edge (loop);
761 if (!e)
762 break;
764 form_subloop (loop, e);
767 merge_latch_edges (loop);
770 /* Split loops with multiple latch edges. */
772 void
773 disambiguate_loops_with_multiple_latches (void)
775 struct loop *loop;
777 FOR_EACH_LOOP (loop, 0)
779 if (!loop->latch)
780 disambiguate_multiple_latches (loop);
784 /* Return nonzero if basic block BB belongs to LOOP. */
785 bool
786 flow_bb_inside_loop_p (const struct loop *loop, const_basic_block bb)
788 struct loop *source_loop;
790 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
791 || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
792 return 0;
794 source_loop = bb->loop_father;
795 return loop == source_loop || flow_loop_nested_p (loop, source_loop);
798 /* Enumeration predicate for get_loop_body_with_size. */
799 static bool
800 glb_enum_p (const_basic_block bb, const void *glb_loop)
802 const struct loop *const loop = (const struct loop *) glb_loop;
803 return (bb != loop->header
804 && dominated_by_p (CDI_DOMINATORS, bb, loop->header));
807 /* Gets the basic blocks of LOOP. The header is the 0-th block, the rest
808 are in dfs order against the direction of edges from the latch. In
809 particular, if header != latch, the latch is the 1-st block. LOOP cannot
810 be the fake loop tree root, and its size must be at most MAX_SIZE. The
811 blocks of the LOOP body are stored to BODY, and the size of the LOOP is
812 returned. */
814 unsigned
815 get_loop_body_with_size (const struct loop *loop, basic_block *body,
816 unsigned max_size)
818 return dfs_enumerate_from (loop->header, 1, glb_enum_p,
819 body, max_size, loop);
822 /* Gets the basic blocks of LOOP. The header is the 0-th block, the rest
823 are in dfs order against the direction of edges from the latch. In
824 particular, if header != latch, the latch is the 1-st block. */
826 basic_block *
827 get_loop_body (const struct loop *loop)
829 basic_block *body, bb;
830 unsigned tv = 0;
832 gcc_assert (loop->num_nodes);
834 body = XNEWVEC (basic_block, loop->num_nodes);
836 if (loop->latch == EXIT_BLOCK_PTR_FOR_FN (cfun))
838 /* There may be blocks unreachable from EXIT_BLOCK, hence we need to
839 special-case the fake loop that contains the whole function. */
840 gcc_assert (loop->num_nodes == (unsigned) n_basic_blocks_for_fn (cfun));
841 body[tv++] = loop->header;
842 body[tv++] = EXIT_BLOCK_PTR_FOR_FN (cfun);
843 FOR_EACH_BB_FN (bb, cfun)
844 body[tv++] = bb;
846 else
847 tv = get_loop_body_with_size (loop, body, loop->num_nodes);
849 gcc_assert (tv == loop->num_nodes);
850 return body;
853 /* Fills dominance descendants inside LOOP of the basic block BB into
854 array TOVISIT from index *TV. */
856 static void
857 fill_sons_in_loop (const struct loop *loop, basic_block bb,
858 basic_block *tovisit, int *tv)
860 basic_block son, postpone = NULL;
862 tovisit[(*tv)++] = bb;
863 for (son = first_dom_son (CDI_DOMINATORS, bb);
864 son;
865 son = next_dom_son (CDI_DOMINATORS, son))
867 if (!flow_bb_inside_loop_p (loop, son))
868 continue;
870 if (dominated_by_p (CDI_DOMINATORS, loop->latch, son))
872 postpone = son;
873 continue;
875 fill_sons_in_loop (loop, son, tovisit, tv);
878 if (postpone)
879 fill_sons_in_loop (loop, postpone, tovisit, tv);
882 /* Gets the body of LOOP (which must be different from the outermost loop)
883 sorted by dominance relation. Additionally, if a basic block s dominates
884 the latch, then only blocks dominated by s appear after it. */
886 basic_block *
887 get_loop_body_in_dom_order (const struct loop *loop)
889 basic_block *tovisit;
890 int tv;
892 gcc_assert (loop->num_nodes);
894 tovisit = XNEWVEC (basic_block, loop->num_nodes);
896 gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
898 tv = 0;
899 fill_sons_in_loop (loop, loop->header, tovisit, &tv);
901 gcc_assert (tv == (int) loop->num_nodes);
903 return tovisit;
906 /* Gets body of a LOOP sorted via provided BB_COMPARATOR. */
908 basic_block *
909 get_loop_body_in_custom_order (const struct loop *loop,
910 int (*bb_comparator) (const void *, const void *))
912 basic_block *bbs = get_loop_body (loop);
914 qsort (bbs, loop->num_nodes, sizeof (basic_block), bb_comparator);
916 return bbs;
919 /* Get body of a LOOP in breadth first sort order. */
921 basic_block *
922 get_loop_body_in_bfs_order (const struct loop *loop)
924 basic_block *blocks;
925 basic_block bb;
926 unsigned int i = 1;
927 unsigned int vc = 0;
929 gcc_assert (loop->num_nodes);
930 gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
932 blocks = XNEWVEC (basic_block, loop->num_nodes);
933 auto_bitmap visited;
934 blocks[0] = loop->header;
935 bitmap_set_bit (visited, loop->header->index);
936 while (i < loop->num_nodes)
938 edge e;
939 edge_iterator ei;
940 gcc_assert (i > vc);
941 bb = blocks[vc++];
943 FOR_EACH_EDGE (e, ei, bb->succs)
945 if (flow_bb_inside_loop_p (loop, e->dest))
947 /* This bb is now visited. */
948 if (bitmap_set_bit (visited, e->dest->index))
949 blocks[i++] = e->dest;
954 return blocks;
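/* Editor's note: an illustrative standalone sketch (not part of cfgloop.c)
   of the traversal above -- the output array doubles as the BFS queue, with
   VC as the read index and the array length as the write index.  The toy_*
   names and the in_loop membership set are hypothetical.  */

#include <cstddef>
#include <unordered_set>
#include <vector>

struct toy_bb
{
  std::vector<toy_bb *> succs;
};

static std::vector<toy_bb *>
toy_loop_body_in_bfs_order (toy_bb *header,
                            const std::unordered_set<toy_bb *> &in_loop)
{
  std::vector<toy_bb *> blocks;
  std::unordered_set<toy_bb *> visited;

  blocks.push_back (header);
  visited.insert (header);

  for (size_t vc = 0; vc < blocks.size (); vc++)
    for (toy_bb *succ : blocks[vc]->succs)
      /* Enqueue each loop block the first time it is reached.  */
      if (in_loop.count (succ) && visited.insert (succ).second)
        blocks.push_back (succ);

  return blocks;
}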
957 /* Hash function for struct loop_exit. */
959 hashval_t
960 loop_exit_hasher::hash (loop_exit *exit)
962 return htab_hash_pointer (exit->e);
965 /* Equality function for struct loop_exit. Compares with edge. */
967 bool
968 loop_exit_hasher::equal (loop_exit *exit, edge e)
970 return exit->e == e;
973 /* Frees the list of loop exit descriptions EX. */
975 void
976 loop_exit_hasher::remove (loop_exit *exit)
978 loop_exit *next;
979 for (; exit; exit = next)
981 next = exit->next_e;
983 exit->next->prev = exit->prev;
984 exit->prev->next = exit->next;
986 ggc_free (exit);
990 /* Returns the list of records for E as an exit of a loop. */
992 static struct loop_exit *
993 get_exit_descriptions (edge e)
995 return current_loops->exits->find_with_hash (e, htab_hash_pointer (e));
998 /* Updates the lists of loop exits in that E appears.
999 If REMOVED is true, E is being removed, and we
1000 just remove it from the lists of exits.
1001 If NEW_EDGE is true and E is not a loop exit, we
1002 do not try to remove it from loop exit lists. */
1004 void
1005 rescan_loop_exit (edge e, bool new_edge, bool removed)
1007 struct loop_exit *exits = NULL, *exit;
1008 struct loop *aloop, *cloop;
1010 if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
1011 return;
1013 if (!removed
1014 && e->src->loop_father != NULL
1015 && e->dest->loop_father != NULL
1016 && !flow_bb_inside_loop_p (e->src->loop_father, e->dest))
1018 cloop = find_common_loop (e->src->loop_father, e->dest->loop_father);
1019 for (aloop = e->src->loop_father;
1020 aloop != cloop;
1021 aloop = loop_outer (aloop))
1023 exit = ggc_alloc<loop_exit> ();
1024 exit->e = e;
1026 exit->next = aloop->exits->next;
1027 exit->prev = aloop->exits;
1028 exit->next->prev = exit;
1029 exit->prev->next = exit;
1031 exit->next_e = exits;
1032 exits = exit;
1036 if (!exits && new_edge)
1037 return;
1039 loop_exit **slot
1040 = current_loops->exits->find_slot_with_hash (e, htab_hash_pointer (e),
1041 exits ? INSERT : NO_INSERT);
1042 if (!slot)
1043 return;
1045 if (exits)
1047 if (*slot)
1048 loop_exit_hasher::remove (*slot);
1049 *slot = exits;
1051 else
1052 current_loops->exits->clear_slot (slot);
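/* Editor's note: an illustrative standalone sketch (not part of cfgloop.c)
   of the exit-record bookkeeping above.  Each loop keeps its exits on a
   circular doubly-linked list whose sentinel is loop->exits itself, so the
   insertion mirrors the four pointer updates in rescan_loop_exit.  The
   toy_* names are hypothetical.  */

struct toy_exit
{
  toy_exit *next, *prev;
};

static void
toy_exit_list_init (toy_exit *sentinel)
{
  /* An empty list is the sentinel pointing at itself.  */
  sentinel->next = sentinel->prev = sentinel;
}

static void
toy_exit_list_push_front (toy_exit *sentinel, toy_exit *e)
{
  /* Splice E in right after the sentinel.  */
  e->next = sentinel->next;
  e->prev = sentinel;
  e->next->prev = e;
  e->prev->next = e;
}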
1055 /* For each loop, record list of exit edges, and start maintaining these
1056 lists. */
1058 void
1059 record_loop_exits (void)
1061 basic_block bb;
1062 edge_iterator ei;
1063 edge e;
1065 if (!current_loops)
1066 return;
1068 if (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
1069 return;
1070 loops_state_set (LOOPS_HAVE_RECORDED_EXITS);
1072 gcc_assert (current_loops->exits == NULL);
1073 current_loops->exits
1074 = hash_table<loop_exit_hasher>::create_ggc (2 * number_of_loops (cfun));
1076 FOR_EACH_BB_FN (bb, cfun)
1078 FOR_EACH_EDGE (e, ei, bb->succs)
1080 rescan_loop_exit (e, true, false);
1085 /* Dumps information about the exit in *SLOT to FILE.
1086 Callback for htab_traverse. */
1088 int
1089 dump_recorded_exit (loop_exit **slot, FILE *file)
1091 struct loop_exit *exit = *slot;
1092 unsigned n = 0;
1093 edge e = exit->e;
1095 for (; exit != NULL; exit = exit->next_e)
1096 n++;
1098 fprintf (file, "Edge %d->%d exits %u loops\n",
1099 e->src->index, e->dest->index, n);
1101 return 1;
1104 /* Dumps the recorded exits of loops to FILE. */
1106 extern void dump_recorded_exits (FILE *);
1107 void
1108 dump_recorded_exits (FILE *file)
1110 if (!current_loops->exits)
1111 return;
1112 current_loops->exits->traverse<FILE *, dump_recorded_exit> (file);
1115 /* Releases lists of loop exits. */
1117 void
1118 release_recorded_exits (function *fn)
1120 gcc_assert (loops_state_satisfies_p (fn, LOOPS_HAVE_RECORDED_EXITS));
1121 loops_for_fn (fn)->exits->empty ();
1122 loops_for_fn (fn)->exits = NULL;
1123 loops_state_clear (fn, LOOPS_HAVE_RECORDED_EXITS);
1126 /* Returns the list of the exit edges of a LOOP. */
1128 vec<edge>
1129 get_loop_exit_edges (const struct loop *loop)
1131 vec<edge> edges = vNULL;
1132 edge e;
1133 unsigned i;
1134 basic_block *body;
1135 edge_iterator ei;
1136 struct loop_exit *exit;
1138 gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
1140 /* If we maintain the lists of exits, use them. Otherwise we must
1141 scan the body of the loop. */
1142 if (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
1144 for (exit = loop->exits->next; exit->e; exit = exit->next)
1145 edges.safe_push (exit->e);
1147 else
1149 body = get_loop_body (loop);
1150 for (i = 0; i < loop->num_nodes; i++)
1151 FOR_EACH_EDGE (e, ei, body[i]->succs)
1153 if (!flow_bb_inside_loop_p (loop, e->dest))
1154 edges.safe_push (e);
1156 free (body);
1159 return edges;
1162 /* Counts the number of conditional branches inside LOOP. */
1164 unsigned
1165 num_loop_branches (const struct loop *loop)
1167 unsigned i, n;
1168 basic_block * body;
1170 gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
1172 body = get_loop_body (loop);
1173 n = 0;
1174 for (i = 0; i < loop->num_nodes; i++)
1175 if (EDGE_COUNT (body[i]->succs) >= 2)
1176 n++;
1177 free (body);
1179 return n;
1182 /* Adds basic block BB to LOOP. */
1183 void
1184 add_bb_to_loop (basic_block bb, struct loop *loop)
1186 unsigned i;
1187 loop_p ploop;
1188 edge_iterator ei;
1189 edge e;
1191 gcc_assert (bb->loop_father == NULL);
1192 bb->loop_father = loop;
1193 loop->num_nodes++;
1194 FOR_EACH_VEC_SAFE_ELT (loop->superloops, i, ploop)
1195 ploop->num_nodes++;
1197 FOR_EACH_EDGE (e, ei, bb->succs)
1199 rescan_loop_exit (e, true, false);
1201 FOR_EACH_EDGE (e, ei, bb->preds)
1203 rescan_loop_exit (e, true, false);
1207 /* Remove basic block BB from loops. */
1208 void
1209 remove_bb_from_loops (basic_block bb)
1211 unsigned i;
1212 struct loop *loop = bb->loop_father;
1213 loop_p ploop;
1214 edge_iterator ei;
1215 edge e;
1217 gcc_assert (loop != NULL);
1218 loop->num_nodes--;
1219 FOR_EACH_VEC_SAFE_ELT (loop->superloops, i, ploop)
1220 ploop->num_nodes--;
1221 bb->loop_father = NULL;
1223 FOR_EACH_EDGE (e, ei, bb->succs)
1225 rescan_loop_exit (e, false, true);
1227 FOR_EACH_EDGE (e, ei, bb->preds)
1229 rescan_loop_exit (e, false, true);
1233 /* Finds nearest common ancestor in loop tree for given loops. */
1234 struct loop *
1235 find_common_loop (struct loop *loop_s, struct loop *loop_d)
1237 unsigned sdepth, ddepth;
1239 if (!loop_s) return loop_d;
1240 if (!loop_d) return loop_s;
1242 sdepth = loop_depth (loop_s);
1243 ddepth = loop_depth (loop_d);
1245 if (sdepth < ddepth)
1246 loop_d = (*loop_d->superloops)[sdepth];
1247 else if (sdepth > ddepth)
1248 loop_s = (*loop_s->superloops)[ddepth];
1250 while (loop_s != loop_d)
1252 loop_s = loop_outer (loop_s);
1253 loop_d = loop_outer (loop_d);
1255 return loop_s;
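/* Editor's note: an illustrative standalone sketch (not part of cfgloop.c)
   of the same nearest-common-ancestor scheme: equalize the depths first,
   then walk both loops up in lock step.  Here the equalization uses
   repeated parent steps instead of indexing the superloop vector; the
   toy_* names are hypothetical.  */

struct toy_loop
{
  toy_loop *parent;   /* immediate superloop, NULL above the root */
  unsigned depth;     /* the loop tree root has depth 0 */
};

static toy_loop *
toy_find_common_loop (toy_loop *a, toy_loop *b)
{
  if (!a) return b;
  if (!b) return a;

  /* Bring both loops to the same depth.  */
  while (a->depth > b->depth) a = a->parent;
  while (b->depth > a->depth) b = b->parent;

  /* Walk up together until the paths meet.  */
  while (a != b)
    {
      a = a->parent;
      b = b->parent;
    }
  return a;
}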
1258 /* Removes LOOP from structures and frees its data. */
1260 void
1261 delete_loop (struct loop *loop)
1263 /* Remove the loop from structure. */
1264 flow_loop_tree_node_remove (loop);
1266 /* Remove loop from loops array. */
1267 (*current_loops->larray)[loop->num] = NULL;
1269 /* Free loop data. */
1270 flow_loop_free (loop);
1273 /* Cancels the LOOP; it must be innermost one. */
1275 static void
1276 cancel_loop (struct loop *loop)
1278 basic_block *bbs;
1279 unsigned i;
1280 struct loop *outer = loop_outer (loop);
1282 gcc_assert (!loop->inner);
1284 /* Move blocks up one level (they should be removed as soon as possible). */
1285 bbs = get_loop_body (loop);
1286 for (i = 0; i < loop->num_nodes; i++)
1287 bbs[i]->loop_father = outer;
1289 free (bbs);
1290 delete_loop (loop);
1293 /* Cancels LOOP and all its subloops. */
1294 void
1295 cancel_loop_tree (struct loop *loop)
1297 while (loop->inner)
1298 cancel_loop_tree (loop->inner);
1299 cancel_loop (loop);
1302 /* Checks that information about loops is correct
1303 -- sizes of loops are all right
1304 -- results of get_loop_body really belong to the loop
1305 -- each loop header has just a single entry edge and a single latch edge
1306 -- each loop latch has only a single successor, which is the header of its loop
1307 -- irreducible loops are correctly marked
1308 -- the cached loop depth and loop father of each bb are correct
1309 */
1310 DEBUG_FUNCTION void
1311 verify_loop_structure (void)
1313 unsigned *sizes, i, j;
1314 basic_block bb, *bbs;
1315 struct loop *loop;
1316 int err = 0;
1317 edge e;
1318 unsigned num = number_of_loops (cfun);
1319 struct loop_exit *exit, *mexit;
1320 bool dom_available = dom_info_available_p (CDI_DOMINATORS);
1322 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
1324 error ("loop verification on loop tree that needs fixup");
1325 err = 1;
1328 /* We need up-to-date dominators, compute or verify them. */
1329 if (!dom_available)
1330 calculate_dominance_info (CDI_DOMINATORS);
1331 else
1332 verify_dominators (CDI_DOMINATORS);
1334 /* Check the loop tree root. */
1335 if (current_loops->tree_root->header != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1336 || current_loops->tree_root->latch != EXIT_BLOCK_PTR_FOR_FN (cfun)
1337 || (current_loops->tree_root->num_nodes
1338 != (unsigned) n_basic_blocks_for_fn (cfun)))
1340 error ("corrupt loop tree root");
1341 err = 1;
1344 /* Check the headers. */
1345 FOR_EACH_BB_FN (bb, cfun)
1346 if (bb_loop_header_p (bb))
1348 if (bb->loop_father->header == NULL)
1350 error ("loop with header %d marked for removal", bb->index);
1351 err = 1;
1353 else if (bb->loop_father->header != bb)
1355 error ("loop with header %d not in loop tree", bb->index);
1356 err = 1;
1359 else if (bb->loop_father->header == bb)
1361 error ("non-loop with header %d not marked for removal", bb->index);
1362 err = 1;
1365 /* Check the recorded loop father and sizes of loops. */
1366 auto_sbitmap visited (last_basic_block_for_fn (cfun));
1367 bitmap_clear (visited);
1368 bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
1369 FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
1371 unsigned n;
1373 if (loop->header == NULL)
1375 error ("removed loop %d in loop tree", loop->num);
1376 err = 1;
1377 continue;
1380 n = get_loop_body_with_size (loop, bbs, n_basic_blocks_for_fn (cfun));
1381 if (loop->num_nodes != n)
1383 error ("size of loop %d should be %d, not %d",
1384 loop->num, n, loop->num_nodes);
1385 err = 1;
1388 for (j = 0; j < n; j++)
1390 bb = bbs[j];
1392 if (!flow_bb_inside_loop_p (loop, bb))
1394 error ("bb %d does not belong to loop %d",
1395 bb->index, loop->num);
1396 err = 1;
1399 /* Ignore this block if it is in an inner loop. */
1400 if (bitmap_bit_p (visited, bb->index))
1401 continue;
1402 bitmap_set_bit (visited, bb->index);
1404 if (bb->loop_father != loop)
1406 error ("bb %d has father loop %d, should be loop %d",
1407 bb->index, bb->loop_father->num, loop->num);
1408 err = 1;
1412 free (bbs);
1414 /* Check headers and latches. */
1415 FOR_EACH_LOOP (loop, 0)
1417 i = loop->num;
1418 if (loop->header == NULL)
1419 continue;
1420 if (!bb_loop_header_p (loop->header))
1422 error ("loop %d%'s header is not a loop header", i);
1423 err = 1;
1425 if (loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS)
1426 && EDGE_COUNT (loop->header->preds) != 2)
1428 error ("loop %d%'s header does not have exactly 2 entries", i);
1429 err = 1;
1431 if (loop->latch)
1433 if (!find_edge (loop->latch, loop->header))
1435 error ("loop %d%'s latch does not have an edge to its header", i);
1436 err = 1;
1438 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, loop->header))
1440 error ("loop %d%'s latch is not dominated by its header", i);
1441 err = 1;
1444 if (loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES))
1446 if (!single_succ_p (loop->latch))
1448 error ("loop %d%'s latch does not have exactly 1 successor", i);
1449 err = 1;
1451 if (single_succ (loop->latch) != loop->header)
1453 error ("loop %d%'s latch does not have header as successor", i);
1454 err = 1;
1456 if (loop->latch->loop_father != loop)
1458 error ("loop %d%'s latch does not belong directly to it", i);
1459 err = 1;
1462 if (loop->header->loop_father != loop)
1464 error ("loop %d%'s header does not belong directly to it", i);
1465 err = 1;
1467 if (loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS)
1468 && (loop_latch_edge (loop)->flags & EDGE_IRREDUCIBLE_LOOP))
1470 error ("loop %d%'s latch is marked as part of irreducible region", i);
1471 err = 1;
1475 /* Check irreducible loops. */
1476 if (loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS))
1478 /* Record old info. */
1479 auto_sbitmap irreds (last_basic_block_for_fn (cfun));
1480 FOR_EACH_BB_FN (bb, cfun)
1482 edge_iterator ei;
1483 if (bb->flags & BB_IRREDUCIBLE_LOOP)
1484 bitmap_set_bit (irreds, bb->index);
1485 else
1486 bitmap_clear_bit (irreds, bb->index);
1487 FOR_EACH_EDGE (e, ei, bb->succs)
1488 if (e->flags & EDGE_IRREDUCIBLE_LOOP)
1489 e->flags |= EDGE_ALL_FLAGS + 1;
1492 /* Recount it. */
1493 mark_irreducible_loops ();
1495 /* Compare. */
1496 FOR_EACH_BB_FN (bb, cfun)
1498 edge_iterator ei;
1500 if ((bb->flags & BB_IRREDUCIBLE_LOOP)
1501 && !bitmap_bit_p (irreds, bb->index))
1503 error ("basic block %d should be marked irreducible", bb->index);
1504 err = 1;
1506 else if (!(bb->flags & BB_IRREDUCIBLE_LOOP)
1507 && bitmap_bit_p (irreds, bb->index))
1509 error ("basic block %d should not be marked irreducible", bb->index);
1510 err = 1;
1512 FOR_EACH_EDGE (e, ei, bb->succs)
1514 if ((e->flags & EDGE_IRREDUCIBLE_LOOP)
1515 && !(e->flags & (EDGE_ALL_FLAGS + 1)))
1517 error ("edge from %d to %d should be marked irreducible",
1518 e->src->index, e->dest->index);
1519 err = 1;
1521 else if (!(e->flags & EDGE_IRREDUCIBLE_LOOP)
1522 && (e->flags & (EDGE_ALL_FLAGS + 1)))
1524 error ("edge from %d to %d should not be marked irreducible",
1525 e->src->index, e->dest->index);
1526 err = 1;
1528 e->flags &= ~(EDGE_ALL_FLAGS + 1);
1533 /* Check the recorded loop exits. */
1534 FOR_EACH_LOOP (loop, 0)
1536 if (!loop->exits || loop->exits->e != NULL)
1538 error ("corrupted head of the exits list of loop %d",
1539 loop->num);
1540 err = 1;
1542 else
1544 /* Check that the list forms a cycle, and all elements except
1545 for the head are nonnull. */
1546 for (mexit = loop->exits, exit = mexit->next, i = 0;
1547 exit->e && exit != mexit;
1548 exit = exit->next)
1550 if (i++ & 1)
1551 mexit = mexit->next;
1554 if (exit != loop->exits)
1556 error ("corrupted exits list of loop %d", loop->num);
1557 err = 1;
1561 if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
1563 if (loop->exits->next != loop->exits)
1565 error ("nonempty exits list of loop %d, but exits are not recorded",
1566 loop->num);
1567 err = 1;
1572 if (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
1574 unsigned n_exits = 0, eloops;
1576 sizes = XCNEWVEC (unsigned, num);
1577 memset (sizes, 0, sizeof (unsigned) * num);
1578 FOR_EACH_BB_FN (bb, cfun)
1580 edge_iterator ei;
1581 if (bb->loop_father == current_loops->tree_root)
1582 continue;
1583 FOR_EACH_EDGE (e, ei, bb->succs)
1585 if (flow_bb_inside_loop_p (bb->loop_father, e->dest))
1586 continue;
1588 n_exits++;
1589 exit = get_exit_descriptions (e);
1590 if (!exit)
1592 error ("exit %d->%d not recorded",
1593 e->src->index, e->dest->index);
1594 err = 1;
1596 eloops = 0;
1597 for (; exit; exit = exit->next_e)
1598 eloops++;
1600 for (loop = bb->loop_father;
1601 loop != e->dest->loop_father
1602 /* When a loop exit is also an entry edge which
1603 can happen when avoiding CFG manipulations
1604 then the last loop exited is the outer loop
1605 of the loop entered. */
1606 && loop != loop_outer (e->dest->loop_father);
1607 loop = loop_outer (loop))
1609 eloops--;
1610 sizes[loop->num]++;
1613 if (eloops != 0)
1615 error ("wrong list of exited loops for edge %d->%d",
1616 e->src->index, e->dest->index);
1617 err = 1;
1622 if (n_exits != current_loops->exits->elements ())
1624 error ("too many loop exits recorded");
1625 err = 1;
1628 FOR_EACH_LOOP (loop, 0)
1630 eloops = 0;
1631 for (exit = loop->exits->next; exit->e; exit = exit->next)
1632 eloops++;
1633 if (eloops != sizes[loop->num])
1635 error ("%d exits recorded for loop %d (having %d exits)",
1636 eloops, loop->num, sizes[loop->num]);
1637 err = 1;
1641 free (sizes);
1644 gcc_assert (!err);
1646 if (!dom_available)
1647 free_dominance_info (CDI_DOMINATORS);
1650 /* Returns latch edge of LOOP. */
1651 edge
1652 loop_latch_edge (const struct loop *loop)
1654 return find_edge (loop->latch, loop->header);
1657 /* Returns preheader edge of LOOP. */
1658 edge
1659 loop_preheader_edge (const struct loop *loop)
1661 edge e;
1662 edge_iterator ei;
1664 gcc_assert (loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS));
1666 FOR_EACH_EDGE (e, ei, loop->header->preds)
1667 if (e->src != loop->latch)
1668 break;
1670 return e;
1673 /* Returns true if E is an exit of LOOP. */
1675 bool
1676 loop_exit_edge_p (const struct loop *loop, const_edge e)
1678 return (flow_bb_inside_loop_p (loop, e->src)
1679 && !flow_bb_inside_loop_p (loop, e->dest));
1682 /* Returns the single exit edge of LOOP, or NULL if LOOP has either no exit
1683 or more than one exit. If loops do not have the exits recorded, NULL
1684 is returned always. */
1686 edge
1687 single_exit (const struct loop *loop)
1689 struct loop_exit *exit = loop->exits->next;
1691 if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
1692 return NULL;
1694 if (exit->e && exit->next == loop->exits)
1695 return exit->e;
1696 else
1697 return NULL;
1700 /* Returns true when BB has an incoming edge exiting LOOP. */
1702 bool
1703 loop_exits_to_bb_p (struct loop *loop, basic_block bb)
1705 edge e;
1706 edge_iterator ei;
1708 FOR_EACH_EDGE (e, ei, bb->preds)
1709 if (loop_exit_edge_p (loop, e))
1710 return true;
1712 return false;
1715 /* Returns true when BB has an outgoing edge exiting LOOP. */
1717 bool
1718 loop_exits_from_bb_p (struct loop *loop, basic_block bb)
1720 edge e;
1721 edge_iterator ei;
1723 FOR_EACH_EDGE (e, ei, bb->succs)
1724 if (loop_exit_edge_p (loop, e))
1725 return true;
1727 return false;
1730 /* Return location corresponding to the loop control condition if possible. */
1732 location_t
1733 get_loop_location (struct loop *loop)
1735 rtx_insn *insn = NULL;
1736 struct niter_desc *desc = NULL;
1737 edge exit;
1739 /* For a for or while loop, we would like to return the location
1740 of the for or while statement, if possible. To do this, look
1741 for the branch guarding the loop back-edge. */
1743 /* If this is a simple loop with an in_edge, then the loop control
1744 branch is typically at the end of its source. */
1745 desc = get_simple_loop_desc (loop);
1746 if (desc->in_edge)
1748 FOR_BB_INSNS_REVERSE (desc->in_edge->src, insn)
1750 if (INSN_P (insn) && INSN_HAS_LOCATION (insn))
1751 return INSN_LOCATION (insn);
1754 /* If loop has a single exit, then the loop control branch
1755 must be at the end of its source. */
1756 if ((exit = single_exit (loop)))
1758 FOR_BB_INSNS_REVERSE (exit->src, insn)
1760 if (INSN_P (insn) && INSN_HAS_LOCATION (insn))
1761 return INSN_LOCATION (insn);
1764 /* Next check the latch, to see if it is non-empty. */
1765 FOR_BB_INSNS_REVERSE (loop->latch, insn)
1767 if (INSN_P (insn) && INSN_HAS_LOCATION (insn))
1768 return INSN_LOCATION (insn);
1770 /* Finally, if none of the above identifies the loop control branch,
1771 return the first location in the loop header. */
1772 FOR_BB_INSNS (loop->header, insn)
1774 if (INSN_P (insn) && INSN_HAS_LOCATION (insn))
1775 return INSN_LOCATION (insn);
1777 /* If all else fails, simply return the current function location. */
1778 return DECL_SOURCE_LOCATION (current_function_decl);
1781 /* Records that every statement in LOOP is executed I_BOUND times.
1782 REALISTIC is true if I_BOUND is expected to be close to the real number
1783 of iterations. UPPER is true if we are sure the loop iterates at most
1784 I_BOUND times. */
1786 void
1787 record_niter_bound (struct loop *loop, const widest_int &i_bound,
1788 bool realistic, bool upper)
1790 /* Update the bounds only when there is no previous estimation, or when the
1791 current estimation is smaller. */
1792 if (upper
1793 && (!loop->any_upper_bound
1794 || wi::ltu_p (i_bound, loop->nb_iterations_upper_bound)))
1796 loop->any_upper_bound = true;
1797 loop->nb_iterations_upper_bound = i_bound;
1798 if (!loop->any_likely_upper_bound)
1800 loop->any_likely_upper_bound = true;
1801 loop->nb_iterations_likely_upper_bound = i_bound;
1804 if (realistic
1805 && (!loop->any_estimate
1806 || wi::ltu_p (i_bound, loop->nb_iterations_estimate)))
1808 loop->any_estimate = true;
1809 loop->nb_iterations_estimate = i_bound;
1811 if (!realistic
1812 && (!loop->any_likely_upper_bound
1813 || wi::ltu_p (i_bound, loop->nb_iterations_likely_upper_bound)))
1815 loop->any_likely_upper_bound = true;
1816 loop->nb_iterations_likely_upper_bound = i_bound;
1819 /* If an upper bound is smaller than the realistic estimate of the
1820 number of iterations, use the upper bound instead. */
1821 if (loop->any_upper_bound
1822 && loop->any_estimate
1823 && wi::ltu_p (loop->nb_iterations_upper_bound,
1824 loop->nb_iterations_estimate))
1825 loop->nb_iterations_estimate = loop->nb_iterations_upper_bound;
1826 if (loop->any_upper_bound
1827 && loop->any_likely_upper_bound
1828 && wi::ltu_p (loop->nb_iterations_upper_bound,
1829 loop->nb_iterations_likely_upper_bound))
1830 loop->nb_iterations_likely_upper_bound = loop->nb_iterations_upper_bound;
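/* Editor's note: an illustrative standalone sketch (not part of cfgloop.c)
   of the recording policy above on plain 64-bit integers: keep only bounds
   that improve on what is already known, and clamp the realistic estimate
   to the proven upper bound.  The likely-upper-bound handling is omitted
   for brevity; the toy_* names are hypothetical.  */

#include <cstdint>

struct toy_niter_bounds
{
  bool any_upper_bound = false;
  bool any_estimate = false;
  uint64_t upper_bound = 0;
  uint64_t estimate = 0;
};

static void
toy_record_niter_bound (toy_niter_bounds &b, uint64_t i_bound,
                        bool realistic, bool upper)
{
  /* Update only when there is no previous bound, or when the new one is
     smaller.  */
  if (upper && (!b.any_upper_bound || i_bound < b.upper_bound))
    {
      b.any_upper_bound = true;
      b.upper_bound = i_bound;
    }
  if (realistic && (!b.any_estimate || i_bound < b.estimate))
    {
      b.any_estimate = true;
      b.estimate = i_bound;
    }

  /* A proven upper bound smaller than the realistic estimate wins.  */
  if (b.any_upper_bound && b.any_estimate && b.upper_bound < b.estimate)
    b.estimate = b.upper_bound;
}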
1833 /* Similar to get_estimated_loop_iterations, but returns the estimate only
1834 if it fits in a HOST_WIDE_INT. If this is not the case, or the estimate
1835 on the number of iterations of LOOP could not be derived, returns -1. */
1837 HOST_WIDE_INT
1838 get_estimated_loop_iterations_int (struct loop *loop)
1840 widest_int nit;
1841 HOST_WIDE_INT hwi_nit;
1843 if (!get_estimated_loop_iterations (loop, &nit))
1844 return -1;
1846 if (!wi::fits_shwi_p (nit))
1847 return -1;
1848 hwi_nit = nit.to_shwi ();
1850 return hwi_nit < 0 ? -1 : hwi_nit;
1853 /* Returns an upper bound on the number of executions of statements
1854 in the LOOP. For statements before the loop exit, this exceeds
1855 the number of executions of the latch by one. */
1857 HOST_WIDE_INT
1858 max_stmt_executions_int (struct loop *loop)
1860 HOST_WIDE_INT nit = get_max_loop_iterations_int (loop);
1861 HOST_WIDE_INT snit;
1863 if (nit == -1)
1864 return -1;
1866 snit = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) nit + 1);
1868 /* If the computation overflows, return -1. */
1869 return snit < 0 ? -1 : snit;
1872 /* Returns a likely upper bound on the number of executions of statements
1873 in the LOOP. For statements before the loop exit, this exceeds
1874 the number of executions of the latch by one. */
1876 HOST_WIDE_INT
1877 likely_max_stmt_executions_int (struct loop *loop)
1879 HOST_WIDE_INT nit = get_likely_max_loop_iterations_int (loop);
1880 HOST_WIDE_INT snit;
1882 if (nit == -1)
1883 return -1;
1885 snit = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) nit + 1);
1887 /* If the computation overflows, return -1. */
1888 return snit < 0 ? -1 : snit;
1891 /* Sets NIT to the estimated number of executions of the latch of the
1892 LOOP. If we have no reliable estimate, the function returns false, otherwise
1893 returns true. */
1895 bool
1896 get_estimated_loop_iterations (struct loop *loop, widest_int *nit)
1898 /* Even if the bound is not recorded, possibly we can derive one from
1899 the profile. */
1900 if (!loop->any_estimate)
1902 if (loop->header->count.reliable_p ())
1904 *nit = gcov_type_to_wide_int
1905 (expected_loop_iterations_unbounded (loop) + 1);
1906 return true;
1908 return false;
1911 *nit = loop->nb_iterations_estimate;
1912 return true;
1915 /* Sets NIT to an upper bound for the maximum number of executions of the
1916 latch of the LOOP. If we have no reliable estimate, the function returns
1917 false, otherwise returns true. */
1919 bool
1920 get_max_loop_iterations (const struct loop *loop, widest_int *nit)
1922 if (!loop->any_upper_bound)
1923 return false;
1925 *nit = loop->nb_iterations_upper_bound;
1926 return true;
1929 /* Similar to get_max_loop_iterations, but returns the estimate only
1930 if it fits in a HOST_WIDE_INT. If this is not the case, or the estimate
1931 on the number of iterations of LOOP could not be derived, returns -1. */
1933 HOST_WIDE_INT
1934 get_max_loop_iterations_int (const struct loop *loop)
1936 widest_int nit;
1937 HOST_WIDE_INT hwi_nit;
1939 if (!get_max_loop_iterations (loop, &nit))
1940 return -1;
1942 if (!wi::fits_shwi_p (nit))
1943 return -1;
1944 hwi_nit = nit.to_shwi ();
1946 return hwi_nit < 0 ? -1 : hwi_nit;
1949 /* Sets NIT to a likely upper bound for the maximum number of executions of
1950 the latch of the LOOP. If we have no reliable estimate, the function
1951 returns false, otherwise returns true. */
1953 bool
1954 get_likely_max_loop_iterations (struct loop *loop, widest_int *nit)
1956 if (!loop->any_likely_upper_bound)
1957 return false;
1959 *nit = loop->nb_iterations_likely_upper_bound;
1960 return true;
1963 /* Similar to get_likely_max_loop_iterations, but returns the estimate only
1964 if it fits in a HOST_WIDE_INT. If this is not the case, or the estimate
1965 on the number of iterations of LOOP could not be derived, returns -1. */
1967 HOST_WIDE_INT
1968 get_likely_max_loop_iterations_int (struct loop *loop)
1970 widest_int nit;
1971 HOST_WIDE_INT hwi_nit;
1973 if (!get_likely_max_loop_iterations (loop, &nit))
1974 return -1;
1976 if (!wi::fits_shwi_p (nit))
1977 return -1;
1978 hwi_nit = nit.to_shwi ();
1980 return hwi_nit < 0 ? -1 : hwi_nit;
1983 /* Returns the loop depth of the loop BB belongs to. */
1985 int
1986 bb_loop_depth (const_basic_block bb)
1988 return bb->loop_father ? loop_depth (bb->loop_father) : 0;
1991 /* Marks LOOP for removal and sets LOOPS_NEED_FIXUP. */
1993 void
1994 mark_loop_for_removal (loop_p loop)
1996 if (loop->header == NULL)
1997 return;
1998 loop->former_header = loop->header;
1999 loop->header = NULL;
2000 loop->latch = NULL;
2001 loops_state_set (LOOPS_NEED_FIXUP);