/* Natural loop discovery code for GNU compiler.
   Copyright (C) 2000-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "diagnostic-core.h"
#include "pointer-set.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"

static void flow_loops_cfg_dump (FILE *);
/* Dump loop related CFG information.  */

flow_loops_cfg_dump (FILE *file)
  FOR_EACH_BB_FN (bb, cfun)
      fprintf (file, ";; %d succs { ", bb->index);
      FOR_EACH_EDGE (succ, ei, bb->succs)
        fprintf (file, "%d ", succ->dest->index);
      fprintf (file, "}\n");
/* Return nonzero if the nodes of LOOP are a subset of OUTER.  */

flow_loop_nested_p (const struct loop *outer, const struct loop *loop)
  unsigned odepth = loop_depth (outer);

  return (loop_depth (loop) > odepth
          && (*loop->superloops)[odepth] == outer);
/* Returns the loop such that LOOP is nested DEPTH (indexed from zero)

superloop_at_depth (struct loop *loop, unsigned depth)
  unsigned ldepth = loop_depth (loop);

  gcc_assert (depth <= ldepth);

  return (*loop->superloops)[depth];
/* Returns the list of the latch edges of LOOP.  */

get_loop_latch_edges (const struct loop *loop)
  vec<edge> ret = vNULL;

  FOR_EACH_EDGE (e, ei, loop->header->preds)
      if (dominated_by_p (CDI_DOMINATORS, e->src, loop->header))
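/* Illustrative sketch, not part of the original file: one possible use of
   get_loop_latch_edges is to test whether a loop still has more than one
   latch edge before relying on loop->latch.  Only the cfgloop.h API shown
   above is assumed; the helper name is hypothetical.  */

static bool
sketch_loop_has_multiple_latches_p (const struct loop *loop)
{
  vec<edge> latches = get_loop_latch_edges (loop);
  bool multiple = latches.length () > 1;
  latches.release ();
  return multiple;
}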
/* Dump the loop information specified by LOOP to the stream FILE
   using auxiliary dump callback function LOOP_DUMP_AUX if non null.  */

flow_loop_dump (const struct loop *loop, FILE *file,
                void (*loop_dump_aux) (const struct loop *, FILE *, int),
  if (! loop || ! loop->header)

  fprintf (file, ";;\n;; Loop %d\n", loop->num);
  fprintf (file, ";; header %d, ", loop->header->index);
  fprintf (file, "latch %d\n", loop->latch->index);

      fprintf (file, "multiple latches:");
      latches = get_loop_latch_edges (loop);
      FOR_EACH_VEC_ELT (latches, i, e)
        fprintf (file, " %d", e->src->index);
      fprintf (file, "\n");

  fprintf (file, ";; depth %d, outer %ld\n",
           loop_depth (loop), (long) (loop_outer (loop)
                                      ? loop_outer (loop)->num : -1));

  fprintf (file, ";; nodes:");
  bbs = get_loop_body (loop);
  for (i = 0; i < loop->num_nodes; i++)
    fprintf (file, " %d", bbs[i]->index);
  fprintf (file, "\n");

    loop_dump_aux (loop, file, verbose);
/* Dump the loop information about loops to the stream FILE,
   using auxiliary dump callback function LOOP_DUMP_AUX if non null.  */

flow_loops_dump (FILE *file, void (*loop_dump_aux) (const struct loop *, FILE *, int), int verbose)
  if (!current_loops || ! file)

  fprintf (file, ";; %d loops found\n", number_of_loops (cfun));

  FOR_EACH_LOOP (loop, LI_INCLUDE_ROOT)
      flow_loop_dump (loop, file, loop_dump_aux, verbose);

    flow_loops_cfg_dump (file);
/* Free data allocated for LOOP.  */

flow_loop_free (struct loop *loop)
  struct loop_exit *exit, *next;

  vec_free (loop->superloops);

  /* Break the list of the loop exit records.  They will be freed when the
     corresponding edge is rescanned or removed, and this avoids
     accessing the (already released) head of the list stored in the
  for (exit = loop->exits->next; exit != loop->exits; exit = next)

  ggc_free (loop->exits);
/* Free all the memory allocated for LOOPS.  */

flow_loops_free (struct loops *loops)
      /* Free the loop descriptors.  */
      FOR_EACH_VEC_SAFE_ELT (loops->larray, i, loop)
          flow_loop_free (loop);

      vec_free (loops->larray);
/* Find the nodes contained within the LOOP with header HEADER.
   Return the number of nodes within the loop.  */

flow_loop_nodes_find (basic_block header, struct loop *loop)
  vec<basic_block> stack = vNULL;
  edge_iterator latch_ei;

  header->loop_father = loop;

  FOR_EACH_EDGE (latch, latch_ei, loop->header->preds)
      if (latch->src->loop_father == loop
          || !dominated_by_p (CDI_DOMINATORS, latch->src, loop->header))

      stack.safe_push (latch->src);
      latch->src->loop_father = loop;

      while (!stack.is_empty ())
          FOR_EACH_EDGE (e, ei, node->preds)
              basic_block ancestor = e->src;

              if (ancestor->loop_father != loop)
                  ancestor->loop_father = loop;
                  stack.safe_push (ancestor);
/* Records the vector of superloops of the loop LOOP, whose immediate
   superloop is FATHER.  */

establish_preds (struct loop *loop, struct loop *father)
  unsigned depth = loop_depth (father) + 1;

  loop->superloops = 0;
  vec_alloc (loop->superloops, depth);
  FOR_EACH_VEC_SAFE_ELT (father->superloops, i, ploop)
    loop->superloops->quick_push (ploop);
  loop->superloops->quick_push (father);

  for (ploop = loop->inner; ploop; ploop = ploop->next)
    establish_preds (ploop, loop);
/* Add LOOP to the loop hierarchy tree where FATHER is father of the
   added loop.  If LOOP has children, take care that their pred fields
   are initialized correctly.  */

flow_loop_tree_node_add (struct loop *father, struct loop *loop)
  loop->next = father->inner;
  father->inner = loop;

  establish_preds (loop, father);
/* Remove LOOP from the loop hierarchy tree.  */

flow_loop_tree_node_remove (struct loop *loop)
  struct loop *prev, *father;

  father = loop_outer (loop);

  /* Remove loop from the list of sons.  */
  if (father->inner == loop)
    father->inner = loop->next;
      for (prev = father->inner; prev->next != loop; prev = prev->next)
      prev->next = loop->next;

  loop->superloops = NULL;
/* Allocates and returns new loop structure.  */

  struct loop *loop = ggc_alloc_cleared_loop ();

  loop->exits = ggc_alloc_cleared_loop_exit ();
  loop->exits->next = loop->exits->prev = loop->exits;
  loop->can_be_parallel = false;
/* Initializes loops structure LOOPS, reserving place for NUM_LOOPS loops
   (including the root of the loop tree).  */

init_loops_structure (struct function *fn,
                      struct loops *loops, unsigned num_loops)
  memset (loops, 0, sizeof *loops);
  vec_alloc (loops->larray, num_loops);

  /* Dummy loop containing whole function.  */
  root = alloc_loop ();
  root->num_nodes = n_basic_blocks_for_fn (fn);
  root->latch = EXIT_BLOCK_PTR_FOR_FN (fn);
  root->header = ENTRY_BLOCK_PTR_FOR_FN (fn);
  ENTRY_BLOCK_PTR_FOR_FN (fn)->loop_father = root;
  EXIT_BLOCK_PTR_FOR_FN (fn)->loop_father = root;

  loops->larray->quick_push (root);
  loops->tree_root = root;
/* Returns whether HEADER is a loop header.  */

bb_loop_header_p (basic_block header)
  /* If we have an abnormal predecessor, do not consider the
     loop (not worth the problems).  */
  if (bb_has_abnormal_pred (header))

  /* Look for back edges where a predecessor is dominated
     by this block.  A natural loop has a single entry
     node (header) that dominates all the nodes in the
     loop.  It also has a single back edge to the header
     from a latch node.  */
  FOR_EACH_EDGE (e, ei, header->preds)
      basic_block latch = e->src;
      if (latch != ENTRY_BLOCK_PTR_FOR_FN (cfun)
          && dominated_by_p (CDI_DOMINATORS, latch, header))
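/* Illustrative sketch, not part of the original file: counting the natural
   loop headers of the current function with bb_loop_header_p.  Requires
   up-to-date dominators, since bb_loop_header_p queries CDI_DOMINATORS.
   The helper name is hypothetical.  */

static unsigned
sketch_count_loop_headers (void)
{
  basic_block bb;
  unsigned n = 0;

  FOR_EACH_BB_FN (bb, cfun)
    if (bb_loop_header_p (bb))
      n++;
  return n;
}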
/* Find all the natural loops in the function and save in LOOPS structure and
   recalculate loop_father information in basic block structures.
   If LOOPS is non-NULL then the loop structures for already recorded loops
   will be re-used and their number will not change.  We assume that no
   stale loops exist in LOOPS.
   When LOOPS is NULL it is allocated and re-built from scratch.
   Return the built LOOPS structure.  */

flow_loops_find (struct loops *loops)
  bool from_scratch = (loops == NULL);

  /* Ensure that the dominators are computed.  */
  calculate_dominance_info (CDI_DOMINATORS);

      loops = ggc_alloc_cleared_loops ();
      init_loops_structure (cfun, loops, 1);

      /* Ensure that loop exits were released.  */
      gcc_assert (loops->exits == NULL);

  /* Taking care of this degenerate case makes the rest of
     this code simpler.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)

  /* The root loop node contains all basic-blocks.  */
  loops->tree_root->num_nodes = n_basic_blocks_for_fn (cfun);

  /* Compute depth first search order of the CFG so that outer
     natural loops will be found before inner natural loops.  */
  rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  pre_and_rev_post_order_compute (NULL, rc_order, false);

  /* Gather all loop headers in reverse completion order and allocate
     loop structures for loops that are not already present.  */
  auto_vec<loop_p> larray (loops->larray->length ());
  for (b = 0; b < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; b++)
      basic_block header = BASIC_BLOCK_FOR_FN (cfun, rc_order[b]);
      if (bb_loop_header_p (header))
          /* The current active loop tree has valid loop-fathers for
              && header->loop_father->header == header)
              loop = header->loop_father;
              /* If we found an existing loop remove it from the
                 loop tree.  It is going to be inserted again
              flow_loop_tree_node_remove (loop);

              /* Otherwise allocate a new loop structure for the loop.  */
              loop = alloc_loop ();
              /* ??? We could re-use unused loop slots here.  */
              loop->num = loops->larray->length ();
              vec_safe_push (loops->larray, loop);
              loop->header = header;

              && dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "flow_loops_find: discovered new "
                     "loop %d with header %d\n",
                     loop->num, header->index);

          /* Reset latch, we recompute it below.  */
          larray.safe_push (loop);

      /* Make blocks part of the loop root node at start.  */
      header->loop_father = loops->tree_root;

  /* Now iterate over the loops found, insert them into the loop tree
     and assign basic-block ownership.  */
  for (i = 0; i < larray.length (); ++i)
      struct loop *loop = larray[i];
      basic_block header = loop->header;

      flow_loop_tree_node_add (header->loop_father, loop);
      loop->num_nodes = flow_loop_nodes_find (loop->header, loop);

      /* Look for the latch for this header block, if it has just a
      FOR_EACH_EDGE (e, ei, header->preds)
          basic_block latch = e->src;

          if (flow_bb_inside_loop_p (loop, latch))
              if (loop->latch != NULL)
                  /* More than one latch edge.  */
/* Ratio of frequencies of edges such that one of the latch edges is
   considered to belong to an inner loop with the same header.  */
#define HEAVY_EDGE_RATIO 8

/* Minimum number of samples for which we apply the
   find_subloop_latch_edge_by_profile heuristic.  */
#define HEAVY_EDGE_MIN_SAMPLES 10
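/* Worked example of the heuristic below (illustrative, not from the original
   file): with HEAVY_EDGE_RATIO == 8 a latch edge is only singled out when it
   carries at least 7/8 of the profiled latch executions.  For a total count
   tcount == 100, an edge with mcount == 95 qualifies because
   (100 - 95) * 8 == 40 <= 100, while mcount == 80 does not because
   (100 - 80) * 8 == 160 > 100.  If tcount < HEAVY_EDGE_MIN_SAMPLES the
   profile is considered too small to decide.  */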
/* If the profile info is available, finds an edge in LATCHES that is much
   more frequent than the remaining edges.  Returns such an edge, or NULL if we do
   We do not use guessed profile here, only the measured one.  The guessed
   profile is usually too flat and unreliable for this (and it is mostly based
   on the loop structure of the program, so it does not make much sense to
   derive the loop structure from it).  */

find_subloop_latch_edge_by_profile (vec<edge> latches)
  gcov_type mcount = 0, tcount = 0;

  FOR_EACH_VEC_ELT (latches, i, e)
      if (e->count > mcount)

  if (tcount < HEAVY_EDGE_MIN_SAMPLES
      || (tcount - mcount) * HEAVY_EDGE_RATIO > tcount)

             "Found latch edge %d -> %d using profile information.\n",
             me->src->index, me->dest->index);
/* Among LATCHES, guesses a latch edge of LOOP corresponding to subloop, based
   on the structure of induction variables.  Returns this edge, or NULL if we do
   We are quite conservative, and look just for an obvious simple innermost
   loop (which is the case where we would lose the most performance by not
   disambiguating the loop).  More precisely, we look for the following
   situation: The source of the chosen latch edge dominates sources of all
   the other latch edges.  Additionally, the header does not contain a phi node
   such that the argument from the chosen edge is equal to the argument from

find_subloop_latch_edge_by_ivs (struct loop *loop ATTRIBUTE_UNUSED, vec<edge> latches)
  edge e, latch = latches[0];
  gimple_stmt_iterator psi;

  /* Find the candidate for the latch edge.  */
  for (i = 1; latches.iterate (i, &e); i++)
    if (dominated_by_p (CDI_DOMINATORS, latch->src, e->src))

  /* Verify that it dominates all the latch edges.  */
  FOR_EACH_VEC_ELT (latches, i, e)
    if (!dominated_by_p (CDI_DOMINATORS, e->src, latch->src))

  /* Check for a phi node that would deny that this is a latch edge of
  for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
      phi = gsi_stmt (psi);
      lop = PHI_ARG_DEF_FROM_EDGE (phi, latch);

      /* Ignore the values that are not changed inside the subloop.  */
      if (TREE_CODE (lop) != SSA_NAME
          || SSA_NAME_DEF_STMT (lop) == phi)
      bb = gimple_bb (SSA_NAME_DEF_STMT (lop));
      if (!bb || !flow_bb_inside_loop_p (loop, bb))

      FOR_EACH_VEC_ELT (latches, i, e)
            && PHI_ARG_DEF_FROM_EDGE (phi, e) == lop)

             "Found latch edge %d -> %d using iv structure.\n",
             latch->src->index, latch->dest->index);
/* If we can determine that one of the several latch edges of LOOP behaves
   as a latch edge of a separate subloop, returns this edge.  Otherwise

find_subloop_latch_edge (struct loop *loop)
  vec<edge> latches = get_loop_latch_edges (loop);

  if (latches.length () > 1)
      latch = find_subloop_latch_edge_by_profile (latches);

          /* We consider ivs to guess the latch edge only in SSA.  Perhaps we
             should use cfghook for this, but it is hard to imagine it would
             be useful elsewhere.  */
          && current_ir_type () == IR_GIMPLE)
        latch = find_subloop_latch_edge_by_ivs (loop, latches);
/* Callback for make_forwarder_block.  Returns true if the edge E is marked
   in the set MFB_REIS_SET.  */

static struct pointer_set_t *mfb_reis_set;

mfb_redirect_edges_in_set (edge e)
  return pointer_set_contains (mfb_reis_set, e);
/* Creates a subloop of LOOP with latch edge LATCH.  */

form_subloop (struct loop *loop, edge latch)
  struct loop *new_loop;

  mfb_reis_set = pointer_set_create ();
  FOR_EACH_EDGE (e, ei, loop->header->preds)
        pointer_set_insert (mfb_reis_set, e);
  new_entry = make_forwarder_block (loop->header, mfb_redirect_edges_in_set,
  pointer_set_destroy (mfb_reis_set);

  loop->header = new_entry->src;

  /* Find the blocks and subloops that belong to the new loop, and add it to
     the appropriate place in the loop tree.  */
  new_loop = alloc_loop ();
  new_loop->header = new_entry->dest;
  new_loop->latch = latch->src;
  add_loop (new_loop, loop);
/* Make all the latch edges of LOOP go to a single forwarder block --
   a new latch of LOOP.  */

merge_latch_edges (struct loop *loop)
  vec<edge> latches = get_loop_latch_edges (loop);

  gcc_assert (latches.length () > 0);

  if (latches.length () == 1)
    loop->latch = latches[0]->src;

        fprintf (dump_file, "Merged latch edges of loop %d\n", loop->num);

      mfb_reis_set = pointer_set_create ();
      FOR_EACH_VEC_ELT (latches, i, e)
        pointer_set_insert (mfb_reis_set, e);
      latch = make_forwarder_block (loop->header, mfb_redirect_edges_in_set,
      pointer_set_destroy (mfb_reis_set);

      loop->header = latch->dest;
      loop->latch = latch->src;
/* LOOP may have several latch edges.  Transform it into (possibly several)
   loops with a single latch edge.  */

disambiguate_multiple_latches (struct loop *loop)
  /* We eliminate the multiple latches by splitting the header to the forwarder
     block F and the rest R, and redirecting the edges.  There are two cases:

     1) If there is a latch edge E that corresponds to a subloop (we guess
        that based on profile -- if it is taken much more often than the
        remaining edges; and on trees, using the information about induction
        variables of the loops), we redirect E to R, all the remaining edges to
        F, then rescan the loops and try again for the outer loop.
     2) If there is no such edge, we redirect all latch edges to F, and the
        entry edges to R, thus making F the single latch of the loop.  */

    fprintf (dump_file, "Disambiguating loop %d with multiple latches\n",

  /* During latch merging, we may need to redirect the entry edges to a new
     block.  This would cause problems if the entry edge was the one from the
     entry block.  To avoid having to handle this case specially, split
  e = find_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), loop->header);

  e = find_subloop_latch_edge (loop);
      form_subloop (loop, e);
    merge_latch_edges (loop);
/* Split loops with multiple latch edges.  */

disambiguate_loops_with_multiple_latches (void)
  FOR_EACH_LOOP (loop, 0)
      disambiguate_multiple_latches (loop);
/* Return nonzero if basic block BB belongs to LOOP.  */

flow_bb_inside_loop_p (const struct loop *loop, const_basic_block bb)
  struct loop *source_loop;

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))

  source_loop = bb->loop_father;
  return loop == source_loop || flow_loop_nested_p (loop, source_loop);
/* Enumeration predicate for get_loop_body_with_size.  */

glb_enum_p (const_basic_block bb, const void *glb_loop)
  const struct loop *const loop = (const struct loop *) glb_loop;
  return (bb != loop->header
          && dominated_by_p (CDI_DOMINATORS, bb, loop->header));
/* Gets basic blocks of a LOOP.  Header is the 0-th block, rest is in dfs
   order against direction of edges from latch.  In particular, if
   header != latch, latch is the 1-st block.  LOOP cannot be the fake
   loop tree root, and its size must be at most MAX_SIZE.  The blocks
   in the LOOP body are stored to BODY, and the size of the LOOP is

get_loop_body_with_size (const struct loop *loop, basic_block *body,
  return dfs_enumerate_from (loop->header, 1, glb_enum_p,
                             body, max_size, loop);
/* Gets basic blocks of a LOOP.  Header is the 0-th block, rest is in dfs
   order against direction of edges from latch.  In particular, if
   header != latch, latch is the 1-st block.  */

get_loop_body (const struct loop *loop)
  basic_block *body, bb;

  gcc_assert (loop->num_nodes);

  body = XNEWVEC (basic_block, loop->num_nodes);

  if (loop->latch == EXIT_BLOCK_PTR_FOR_FN (cfun))
      /* There may be blocks unreachable from EXIT_BLOCK, hence we need to
         special-case the fake loop that contains the whole function.  */
      gcc_assert (loop->num_nodes == (unsigned) n_basic_blocks_for_fn (cfun));
      body[tv++] = loop->header;
      body[tv++] = EXIT_BLOCK_PTR_FOR_FN (cfun);
      FOR_EACH_BB_FN (bb, cfun)

    tv = get_loop_body_with_size (loop, body, loop->num_nodes);

  gcc_assert (tv == loop->num_nodes);
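/* Illustrative sketch, not part of the original file: typical use of
   get_loop_body is to walk the blocks of LOOP and then free the returned
   array, which the caller owns.  Here we count the blocks with at least one
   edge leaving LOOP; the helper name is hypothetical.  */

static unsigned
sketch_count_exiting_blocks (const struct loop *loop)
{
  basic_block *body = get_loop_body (loop);
  unsigned i, n = 0;
  edge e;
  edge_iterator ei;

  for (i = 0; i < loop->num_nodes; i++)
    FOR_EACH_EDGE (e, ei, body[i]->succs)
      if (!flow_bb_inside_loop_p (loop, e->dest))
        {
          n++;
          break;
        }

  free (body);
  return n;
}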
/* Fills dominance descendants inside LOOP of the basic block BB into
   array TOVISIT from index *TV.  */

fill_sons_in_loop (const struct loop *loop, basic_block bb,
                   basic_block *tovisit, int *tv)
  basic_block son, postpone = NULL;

  tovisit[(*tv)++] = bb;
  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son = next_dom_son (CDI_DOMINATORS, son))
      if (!flow_bb_inside_loop_p (loop, son))

      if (dominated_by_p (CDI_DOMINATORS, loop->latch, son))
      fill_sons_in_loop (loop, son, tovisit, tv);

    fill_sons_in_loop (loop, postpone, tovisit, tv);
/* Gets body of a LOOP (that must be different from the outermost loop)
   sorted by dominance relation.  Additionally, if a basic block s dominates
   the latch, then only blocks dominated by s appear after it.  */

get_loop_body_in_dom_order (const struct loop *loop)
  basic_block *tovisit;

  gcc_assert (loop->num_nodes);

  tovisit = XNEWVEC (basic_block, loop->num_nodes);

  gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));

  fill_sons_in_loop (loop, loop->header, tovisit, &tv);

  gcc_assert (tv == (int) loop->num_nodes);
/* Gets body of a LOOP sorted via provided BB_COMPARATOR.  */

get_loop_body_in_custom_order (const struct loop *loop,
                               int (*bb_comparator) (const void *, const void *))
  basic_block *bbs = get_loop_body (loop);

  qsort (bbs, loop->num_nodes, sizeof (basic_block), bb_comparator);
/* Get body of a LOOP in breadth first sort order.  */

get_loop_body_in_bfs_order (const struct loop *loop)
  gcc_assert (loop->num_nodes);
  gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));

  blocks = XNEWVEC (basic_block, loop->num_nodes);
  visited = BITMAP_ALLOC (NULL);

  while (i < loop->num_nodes)
      if (bitmap_set_bit (visited, bb->index))
        /* This basic block is now visited */

      FOR_EACH_EDGE (e, ei, bb->succs)
          if (flow_bb_inside_loop_p (loop, e->dest))
              if (bitmap_set_bit (visited, e->dest->index))
                blocks[i++] = e->dest;

      gcc_assert (i >= vc);

  BITMAP_FREE (visited);
/* Hash function for struct loop_exit.  */

loop_exit_hash (const void *ex)
  const struct loop_exit *const exit = (const struct loop_exit *) ex;

  return htab_hash_pointer (exit->e);

/* Equality function for struct loop_exit.  Compares with edge.  */

loop_exit_eq (const void *ex, const void *e)
  const struct loop_exit *const exit = (const struct loop_exit *) ex;
/* Frees the list of loop exit descriptions EX.  */

loop_exit_free (void *ex)
  struct loop_exit *exit = (struct loop_exit *) ex, *next;

  for (; exit; exit = next)
      exit->next->prev = exit->prev;
      exit->prev->next = exit->next;
/* Returns the list of records for E as an exit of a loop.  */

static struct loop_exit *
get_exit_descriptions (edge e)
  return (struct loop_exit *) htab_find_with_hash (current_loops->exits, e,
                                                   htab_hash_pointer (e));
/* Updates the lists of loop exits in which E appears.
   If REMOVED is true, E is being removed, and we
   just remove it from the lists of exits.
   If NEW_EDGE is true and E is not a loop exit, we
   do not try to remove it from loop exit lists.  */

rescan_loop_exit (edge e, bool new_edge, bool removed)
  struct loop_exit *exits = NULL, *exit;
  struct loop *aloop, *cloop;

  if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))

      && e->src->loop_father != NULL
      && e->dest->loop_father != NULL
      && !flow_bb_inside_loop_p (e->src->loop_father, e->dest))
      cloop = find_common_loop (e->src->loop_father, e->dest->loop_father);
      for (aloop = e->src->loop_father;
           aloop = loop_outer (aloop))
          exit = ggc_alloc_loop_exit ();

          exit->next = aloop->exits->next;
          exit->prev = aloop->exits;
          exit->next->prev = exit;
          exit->prev->next = exit;

          exit->next_e = exits;

  if (!exits && new_edge)

  slot = htab_find_slot_with_hash (current_loops->exits, e,
                                   htab_hash_pointer (e),
                                   exits ? INSERT : NO_INSERT);

        loop_exit_free (*slot);
        htab_clear_slot (current_loops->exits, slot);
/* For each loop, record list of exit edges, and start maintaining these

record_loop_exits (void)
  if (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
  loops_state_set (LOOPS_HAVE_RECORDED_EXITS);

  gcc_assert (current_loops->exits == NULL);
  current_loops->exits = htab_create_ggc (2 * number_of_loops (cfun),
                                          loop_exit_hash, loop_exit_eq,

  FOR_EACH_BB_FN (bb, cfun)
      FOR_EACH_EDGE (e, ei, bb->succs)
          rescan_loop_exit (e, true, false);
/* Dumps information about the exit in *SLOT to FILE.
   Callback for htab_traverse.  */

dump_recorded_exit (void **slot, void *file)
  struct loop_exit *exit = (struct loop_exit *) *slot;

  for (; exit != NULL; exit = exit->next_e)

  fprintf ((FILE*) file, "Edge %d->%d exits %u loops\n",
           e->src->index, e->dest->index, n);
/* Dumps the recorded exits of loops to FILE.  */

extern void dump_recorded_exits (FILE *);
dump_recorded_exits (FILE *file)
  if (!current_loops->exits)
  htab_traverse (current_loops->exits, dump_recorded_exit, file);
/* Releases lists of loop exits.  */

release_recorded_exits (void)
  gcc_assert (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS));
  htab_delete (current_loops->exits);
  current_loops->exits = NULL;
  loops_state_clear (LOOPS_HAVE_RECORDED_EXITS);
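/* Illustrative sketch, not part of the original file: the usual lifecycle of
   the recorded-exits machinery around code that wants cheap single_exit or
   get_loop_exit_edges queries.  The function name is hypothetical.  */

static void
sketch_with_recorded_exits (void)
{
  bool recorded_here = false;

  if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
    {
      record_loop_exits ();
      recorded_here = true;
    }

  /* ... exit queries such as single_exit (loop) are cheap here ...  */

  if (recorded_here)
    release_recorded_exits ();
}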
/* Returns the list of the exit edges of a LOOP.  */

get_loop_exit_edges (const struct loop *loop)
  vec<edge> edges = vNULL;
  struct loop_exit *exit;

  gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* If we maintain the lists of exits, use them.  Otherwise we must
     scan the body of the loop.  */
  if (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
      for (exit = loop->exits->next; exit->e; exit = exit->next)
        edges.safe_push (exit->e);

      body = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
        FOR_EACH_EDGE (e, ei, body[i]->succs)
            if (!flow_bb_inside_loop_p (loop, e->dest))
              edges.safe_push (e);
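/* Illustrative sketch, not part of the original file: iterating the exit
   edges returned by get_loop_exit_edges; the vector is owned by the caller
   and must be released.  The helper name is hypothetical.  */

static void
sketch_dump_loop_exits (FILE *file, const struct loop *loop)
{
  vec<edge> exits = get_loop_exit_edges (loop);
  unsigned i;
  edge e;

  FOR_EACH_VEC_ELT (exits, i, e)
    fprintf (file, "%d -> %d\n", e->src->index, e->dest->index);
  exits.release ();
}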
/* Counts the number of conditional branches inside LOOP.  */

num_loop_branches (const struct loop *loop)
  gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));

  body = get_loop_body (loop);
  for (i = 0; i < loop->num_nodes; i++)
    if (EDGE_COUNT (body[i]->succs) >= 2)
/* Adds basic block BB to LOOP.  */

add_bb_to_loop (basic_block bb, struct loop *loop)
  gcc_assert (bb->loop_father == NULL);
  bb->loop_father = loop;
  FOR_EACH_VEC_SAFE_ELT (loop->superloops, i, ploop)

  FOR_EACH_EDGE (e, ei, bb->succs)
      rescan_loop_exit (e, true, false);
  FOR_EACH_EDGE (e, ei, bb->preds)
      rescan_loop_exit (e, true, false);
/* Remove basic block BB from loops.  */

remove_bb_from_loops (basic_block bb)
  struct loop *loop = bb->loop_father;

  gcc_assert (loop != NULL);
  FOR_EACH_VEC_SAFE_ELT (loop->superloops, i, ploop)
  bb->loop_father = NULL;

  FOR_EACH_EDGE (e, ei, bb->succs)
      rescan_loop_exit (e, false, true);
  FOR_EACH_EDGE (e, ei, bb->preds)
      rescan_loop_exit (e, false, true);
/* Finds nearest common ancestor in loop tree for given loops.  */

find_common_loop (struct loop *loop_s, struct loop *loop_d)
  unsigned sdepth, ddepth;

  if (!loop_s) return loop_d;
  if (!loop_d) return loop_s;

  sdepth = loop_depth (loop_s);
  ddepth = loop_depth (loop_d);

  if (sdepth < ddepth)
    loop_d = (*loop_d->superloops)[sdepth];
  else if (sdepth > ddepth)
    loop_s = (*loop_s->superloops)[ddepth];

  while (loop_s != loop_d)
      loop_s = loop_outer (loop_s);
      loop_d = loop_outer (loop_d);
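/* Illustrative sketch, not part of the original file: find_common_loop on
   the loop fathers of two blocks gives the innermost loop containing both,
   e.g. when checking whether an edge leaves a loop nest.  The helper name
   is hypothetical.  */

static struct loop *
sketch_innermost_common_loop (basic_block a, basic_block b)
{
  return find_common_loop (a->loop_father, b->loop_father);
}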
/* Removes LOOP from structures and frees its data.  */

delete_loop (struct loop *loop)
  /* Remove the loop from structure.  */
  flow_loop_tree_node_remove (loop);

  /* Remove loop from loops array.  */
  (*current_loops->larray)[loop->num] = NULL;

  /* Free loop data.  */
  flow_loop_free (loop);
/* Cancels the LOOP; it must be the innermost one.  */

cancel_loop (struct loop *loop)
  struct loop *outer = loop_outer (loop);

  gcc_assert (!loop->inner);

  /* Move blocks up one level (they should be removed as soon as possible).  */
  bbs = get_loop_body (loop);
  for (i = 0; i < loop->num_nodes; i++)
    bbs[i]->loop_father = outer;
/* Cancels LOOP and all its subloops.  */

cancel_loop_tree (struct loop *loop)
    cancel_loop_tree (loop->inner);
/* Checks that information about loops is correct
     -- sizes of loops are all right
     -- results of get_loop_body really belong to the loop
     -- loop headers have just a single entry edge and a single latch edge
     -- loop latches have only a single successor that is the header of their loop
     -- irreducible loops are correctly marked
     -- the cached loop depth and loop father of each bb are correct.  */

verify_loop_structure (void)
  unsigned *sizes, i, j;
  basic_block bb, *bbs;
  unsigned num = number_of_loops (cfun);
  struct loop_exit *exit, *mexit;
  bool dom_available = dom_info_available_p (CDI_DOMINATORS);

  if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
      error ("loop verification on loop tree that needs fixup");

  /* We need up-to-date dominators, compute or verify them.  */
    calculate_dominance_info (CDI_DOMINATORS);
    verify_dominators (CDI_DOMINATORS);

  /* Check the headers.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (bb_loop_header_p (bb))
        if (bb->loop_father->header == NULL)
            error ("loop with header %d marked for removal", bb->index);
        else if (bb->loop_father->header != bb)
            error ("loop with header %d not in loop tree", bb->index);
    else if (bb->loop_father->header == bb)
        error ("non-loop with header %d not marked for removal", bb->index);

  /* Check the recorded loop father and sizes of loops.  */
  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
      if (loop->header == NULL)
          error ("removed loop %d in loop tree", loop->num);

      n = get_loop_body_with_size (loop, bbs, n_basic_blocks_for_fn (cfun));
      if (loop->num_nodes != n)
          error ("size of loop %d should be %d, not %d",
                 loop->num, n, loop->num_nodes);

      for (j = 0; j < n; j++)
          if (!flow_bb_inside_loop_p (loop, bb))
              error ("bb %d does not belong to loop %d",
                     bb->index, loop->num);

          /* Ignore this block if it is in an inner loop.  */
          if (bitmap_bit_p (visited, bb->index))
          bitmap_set_bit (visited, bb->index);

          if (bb->loop_father != loop)
              error ("bb %d has father loop %d, should be loop %d",
                     bb->index, bb->loop_father->num, loop->num);

  sbitmap_free (visited);

  /* Check headers and latches.  */
  FOR_EACH_LOOP (loop, 0)
      if (loop->header == NULL)
      if (!bb_loop_header_p (loop->header))
          error ("loop %d%'s header is not a loop header", i);
      if (loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS)
          && EDGE_COUNT (loop->header->preds) != 2)
          error ("loop %d%'s header does not have exactly 2 entries", i);

      if (!find_edge (loop->latch, loop->header))
          error ("loop %d%'s latch does not have an edge to its header", i);
      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, loop->header))
          error ("loop %d%'s latch is not dominated by its header", i);

      if (loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES))
          if (!single_succ_p (loop->latch))
              error ("loop %d%'s latch does not have exactly 1 successor", i);
          if (single_succ (loop->latch) != loop->header)
              error ("loop %d%'s latch does not have header as successor", i);
          if (loop->latch->loop_father != loop)
              error ("loop %d%'s latch does not belong directly to it", i);
      if (loop->header->loop_father != loop)
          error ("loop %d%'s header does not belong directly to it", i);
      if (loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS)
          && (loop_latch_edge (loop)->flags & EDGE_IRREDUCIBLE_LOOP))
          error ("loop %d%'s latch is marked as part of irreducible region", i);

  /* Check irreducible loops.  */
  if (loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS))
      /* Record old info.  */
      irreds = sbitmap_alloc (last_basic_block_for_fn (cfun));
      FOR_EACH_BB_FN (bb, cfun)
          if (bb->flags & BB_IRREDUCIBLE_LOOP)
            bitmap_set_bit (irreds, bb->index);
            bitmap_clear_bit (irreds, bb->index);
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (e->flags & EDGE_IRREDUCIBLE_LOOP)
              e->flags |= EDGE_ALL_FLAGS + 1;

      mark_irreducible_loops ();

      FOR_EACH_BB_FN (bb, cfun)
          if ((bb->flags & BB_IRREDUCIBLE_LOOP)
              && !bitmap_bit_p (irreds, bb->index))
              error ("basic block %d should be marked irreducible", bb->index);
          else if (!(bb->flags & BB_IRREDUCIBLE_LOOP)
                   && bitmap_bit_p (irreds, bb->index))
              error ("basic block %d should not be marked irreducible", bb->index);
          FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_IRREDUCIBLE_LOOP)
                  && !(e->flags & (EDGE_ALL_FLAGS + 1)))
                  error ("edge from %d to %d should be marked irreducible",
                         e->src->index, e->dest->index);
              else if (!(e->flags & EDGE_IRREDUCIBLE_LOOP)
                       && (e->flags & (EDGE_ALL_FLAGS + 1)))
                  error ("edge from %d to %d should not be marked irreducible",
                         e->src->index, e->dest->index);
              e->flags &= ~(EDGE_ALL_FLAGS + 1);

  /* Check the recorded loop exits.  */
  FOR_EACH_LOOP (loop, 0)
      if (!loop->exits || loop->exits->e != NULL)
          error ("corrupted head of the exits list of loop %d",

          /* Check that the list forms a cycle, and all elements except
             for the head are nonnull.  */
          for (mexit = loop->exits, exit = mexit->next, i = 0;
               exit->e && exit != mexit;
                 mexit = mexit->next;

          if (exit != loop->exits)
              error ("corrupted exits list of loop %d", loop->num);

      if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
          if (loop->exits->next != loop->exits)
              error ("nonempty exits list of loop %d, but exits are not recorded",

  if (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
      unsigned n_exits = 0, eloops;

      sizes = XCNEWVEC (unsigned, num);
      memset (sizes, 0, sizeof (unsigned) * num);
      FOR_EACH_BB_FN (bb, cfun)
          if (bb->loop_father == current_loops->tree_root)
          FOR_EACH_EDGE (e, ei, bb->succs)
              if (flow_bb_inside_loop_p (bb->loop_father, e->dest))

              exit = get_exit_descriptions (e);
                  error ("exit %d->%d not recorded",
                         e->src->index, e->dest->index);
              for (; exit; exit = exit->next_e)

              for (loop = bb->loop_father;
                   loop != e->dest->loop_father
                   /* When a loop exit is also an entry edge which
                      can happen when avoiding CFG manipulations
                      then the last loop exited is the outer loop
                      of the loop entered.  */
                   && loop != loop_outer (e->dest->loop_father);
                   loop = loop_outer (loop))
                  error ("wrong list of exited loops for edge %d->%d",
                         e->src->index, e->dest->index);

      if (n_exits != htab_elements (current_loops->exits))
          error ("too many loop exits recorded");

      FOR_EACH_LOOP (loop, 0)
          for (exit = loop->exits->next; exit->e; exit = exit->next)
          if (eloops != sizes[loop->num])
              error ("%d exits recorded for loop %d (having %d exits)",
                     eloops, loop->num, sizes[loop->num]);

    free_dominance_info (CDI_DOMINATORS);
/* Returns latch edge of LOOP.  */

loop_latch_edge (const struct loop *loop)
  return find_edge (loop->latch, loop->header);
/* Returns preheader edge of LOOP.  */

loop_preheader_edge (const struct loop *loop)
  gcc_assert (loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS));

  FOR_EACH_EDGE (e, ei, loop->header->preds)
    if (e->src != loop->latch)
/* Returns true if E is an exit of LOOP.  */

loop_exit_edge_p (const struct loop *loop, const_edge e)
  return (flow_bb_inside_loop_p (loop, e->src)
          && !flow_bb_inside_loop_p (loop, e->dest));
/* Returns the single exit edge of LOOP, or NULL if LOOP has either no exit
   or more than one exit.  If loops do not have the exits recorded, NULL
   is always returned.  */

single_exit (const struct loop *loop)
  struct loop_exit *exit = loop->exits->next;

  if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))

  if (exit->e && exit->next == loop->exits)
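/* Illustrative sketch, not part of the original file: single_exit only gives
   an answer when exits are recorded, so callers typically guard its use.
   The helper name is hypothetical.  */

static bool
sketch_single_exit_dest (const struct loop *loop, basic_block *dest)
{
  edge e;

  if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
    return false;
  e = single_exit (loop);
  if (!e)
    return false;
  *dest = e->dest;
  return true;
}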
/* Returns true when BB has an incoming edge exiting LOOP.  */

loop_exits_to_bb_p (struct loop *loop, basic_block bb)
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (loop_exit_edge_p (loop, e))
/* Returns true when BB has an outgoing edge exiting LOOP.  */

loop_exits_from_bb_p (struct loop *loop, basic_block bb)
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (loop_exit_edge_p (loop, e))
/* Return location corresponding to the loop control condition if possible.  */

get_loop_location (struct loop *loop)
  struct niter_desc *desc = NULL;

  /* For a for or while loop, we would like to return the location
     of the for or while statement, if possible.  To do this, look
     for the branch guarding the loop back-edge.  */

  /* If this is a simple loop with an in_edge, then the loop control
     branch is typically at the end of its source.  */
  desc = get_simple_loop_desc (loop);
      FOR_BB_INSNS_REVERSE (desc->in_edge->src, insn)
          if (INSN_P (insn) && INSN_HAS_LOCATION (insn))
            return INSN_LOCATION (insn);

  /* If loop has a single exit, then the loop control branch
     must be at the end of its source.  */
  if ((exit = single_exit (loop)))
      FOR_BB_INSNS_REVERSE (exit->src, insn)
          if (INSN_P (insn) && INSN_HAS_LOCATION (insn))
            return INSN_LOCATION (insn);

  /* Next check the latch, to see if it is non-empty.  */
  FOR_BB_INSNS_REVERSE (loop->latch, insn)
      if (INSN_P (insn) && INSN_HAS_LOCATION (insn))
        return INSN_LOCATION (insn);

  /* Finally, if none of the above identifies the loop control branch,
     return the first location in the loop header.  */
  FOR_BB_INSNS (loop->header, insn)
      if (INSN_P (insn) && INSN_HAS_LOCATION (insn))
        return INSN_LOCATION (insn);

  /* If all else fails, simply return the current function location.  */
  return DECL_SOURCE_LOCATION (current_function_decl);
/* Records that every statement in LOOP is executed I_BOUND times.
   REALISTIC is true if I_BOUND is expected to be close to the real number
   of iterations.  UPPER is true if we are sure the loop iterates at most

record_niter_bound (struct loop *loop, double_int i_bound, bool realistic,
  /* Update the bounds only when there is no previous estimation, or when the
     current estimation is smaller.  */
      && (!loop->any_upper_bound
          || i_bound.ult (loop->nb_iterations_upper_bound)))
      loop->any_upper_bound = true;
      loop->nb_iterations_upper_bound = i_bound;
      && (!loop->any_estimate
          || i_bound.ult (loop->nb_iterations_estimate)))
      loop->any_estimate = true;
      loop->nb_iterations_estimate = i_bound;

  /* If an upper bound is smaller than the realistic estimate of the
     number of iterations, use the upper bound instead.  */
  if (loop->any_upper_bound
      && loop->any_estimate
      && loop->nb_iterations_upper_bound.ult (loop->nb_iterations_estimate))
    loop->nb_iterations_estimate = loop->nb_iterations_upper_bound;
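/* Illustrative sketch, not part of the original file: recording that a loop
   iterates at most 100 times, with 100 also taken as a realistic estimate.
   Assumes double_int::from_uhwi from double-int.h; a smaller bound recorded
   later simply tightens these fields, per the logic above.  The helper name
   is hypothetical.  */

static void
sketch_record_small_bound (struct loop *loop)
{
  record_niter_bound (loop, double_int::from_uhwi (100), true, true);
}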
/* Similar to get_estimated_loop_iterations, but returns the estimate only
   if it fits into a HOST_WIDE_INT.  If this is not the case, or the estimate
   on the number of iterations of LOOP could not be derived, returns -1.  */

get_estimated_loop_iterations_int (struct loop *loop)
  HOST_WIDE_INT hwi_nit;

  if (!get_estimated_loop_iterations (loop, &nit))
  if (!nit.fits_shwi ())
  hwi_nit = nit.to_shwi ();

  return hwi_nit < 0 ? -1 : hwi_nit;
/* Returns an upper bound on the number of executions of statements
   in the LOOP.  For statements before the loop exit, this exceeds
   the number of executions of the latch by one.  */

max_stmt_executions_int (struct loop *loop)
  HOST_WIDE_INT nit = get_max_loop_iterations_int (loop);

  snit = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) nit + 1);

  /* If the computation overflows, return -1.  */
  return snit < 0 ? -1 : snit;
/* Sets NIT to the estimated number of executions of the latch of the
   LOOP.  If we have no reliable estimate, the function returns false, otherwise

get_estimated_loop_iterations (struct loop *loop, double_int *nit)
  /* Even if the bound is not recorded, possibly we can derive one from
  if (!loop->any_estimate)
      if (loop->header->count)
          *nit = gcov_type_to_double_int
                   (expected_loop_iterations_unbounded (loop) + 1);

  *nit = loop->nb_iterations_estimate;
/* Sets NIT to an upper bound for the maximum number of executions of the
   latch of the LOOP.  If we have no reliable estimate, the function returns
   false, otherwise returns true.  */

get_max_loop_iterations (struct loop *loop, double_int *nit)
  if (!loop->any_upper_bound)

  *nit = loop->nb_iterations_upper_bound;
/* Similar to get_max_loop_iterations, but returns the estimate only
   if it fits into a HOST_WIDE_INT.  If this is not the case, or the estimate
   on the number of iterations of LOOP could not be derived, returns -1.  */

get_max_loop_iterations_int (struct loop *loop)
  HOST_WIDE_INT hwi_nit;

  if (!get_max_loop_iterations (loop, &nit))
  if (!nit.fits_shwi ())
  hwi_nit = nit.to_shwi ();

  return hwi_nit < 0 ? -1 : hwi_nit;
/* Returns the loop depth of the loop BB belongs to.  */

bb_loop_depth (const_basic_block bb)
  return bb->loop_father ? loop_depth (bb->loop_father) : 0;