/* Natural loop discovery code for GNU compiler.
   Copyright (C) 2000-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "gimple-ssa.h"
#include "diagnostic-core.h"
#include "gimple-iterator.h"
static void flow_loops_cfg_dump (FILE *);

/* Dump loop related CFG information.  */

flow_loops_cfg_dump (FILE *file)
  FOR_EACH_BB_FN (bb, cfun)
      fprintf (file, ";; %d succs { ", bb->index);
      FOR_EACH_EDGE (succ, ei, bb->succs)
        fprintf (file, "%d ", succ->dest->index);
      fprintf (file, "}\n");
/* Return nonzero if the nodes of LOOP are a subset of OUTER.  */

flow_loop_nested_p (const struct loop *outer, const struct loop *loop)
  unsigned odepth = loop_depth (outer);

  return (loop_depth (loop) > odepth
          && (*loop->superloops)[odepth] == outer);
/* Returns the loop such that LOOP is nested DEPTH (indexed from zero)
   times inside LOOP.  */

superloop_at_depth (struct loop *loop, unsigned depth)
  unsigned ldepth = loop_depth (loop);

  gcc_assert (depth <= ldepth);

  return (*loop->superloops)[depth];
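
/* Illustrative sketch (editor's addition, not part of GCC): for a loop at
   depth d, entry k of its superloops vector is its ancestor at depth k, so
   superloop_at_depth and flow_loop_nested_p above are plain array lookups.
   The helper name below is hypothetical.  */
#if 0
static bool
superloops_invariant_ok (struct loop *loop)
{
  /* Check that each recorded ancestor sits at the depth matching its
     index, which is what the lookups above assume.  */
  for (unsigned d = 0; d < loop_depth (loop); d++)
    if (loop_depth ((*loop->superloops)[d]) != d)
      return false;
  return true;
}
#endif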
/* Returns the list of the latch edges of LOOP.  */

get_loop_latch_edges (const struct loop *loop)
  vec<edge> ret = vNULL;

  FOR_EACH_EDGE (e, ei, loop->header->preds)
      if (dominated_by_p (CDI_DOMINATORS, e->src, loop->header))
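
/* Hedged usage sketch (editor's addition): a caller of get_loop_latch_edges
   is expected to release the returned vector; the helper name and output
   stream are assumptions.  */
#if 0
static void
dump_latch_edges_example (const struct loop *loop, FILE *file)
{
  vec<edge> latches = get_loop_latch_edges (loop);
  unsigned i;
  edge e;

  FOR_EACH_VEC_ELT (latches, i, e)
    fprintf (file, "latch edge %d -> %d\n", e->src->index, e->dest->index);
  latches.release ();
}
#endif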
/* Dump the loop information specified by LOOP to the stream FILE
   using auxiliary dump callback function LOOP_DUMP_AUX if non null.  */

flow_loop_dump (const struct loop *loop, FILE *file,
                void (*loop_dump_aux) (const struct loop *, FILE *, int),

  if (! loop || ! loop->header)

  fprintf (file, ";;\n;; Loop %d\n", loop->num);

  fprintf (file, ";; header %d, ", loop->header->index);
  fprintf (file, "latch %d\n", loop->latch->index);

      fprintf (file, "multiple latches:");
      latches = get_loop_latch_edges (loop);
      FOR_EACH_VEC_ELT (latches, i, e)
        fprintf (file, " %d", e->src->index);
      fprintf (file, "\n");

  fprintf (file, ";; depth %d, outer %ld\n",
           loop_depth (loop), (long) (loop_outer (loop)
                                      ? loop_outer (loop)->num : -1));

  gcov_type nit = expected_loop_iterations_unbounded (loop, &read_profile_p);
  if (read_profile_p && !loop->any_estimate)
    fprintf (file, ";; profile-based iteration count: %" PRIu64 "\n",

  fprintf (file, ";; nodes:");
  bbs = get_loop_body (loop);
  for (i = 0; i < loop->num_nodes; i++)
    fprintf (file, " %d", bbs[i]->index);
  fprintf (file, "\n");

    loop_dump_aux (loop, file, verbose);
/* Dump the loop information about loops to the stream FILE,
   using auxiliary dump callback function LOOP_DUMP_AUX if non null.  */

flow_loops_dump (FILE *file, void (*loop_dump_aux) (const struct loop *, FILE *, int), int verbose)
  if (!current_loops || ! file)

  fprintf (file, ";; %d loops found\n", number_of_loops (cfun));

  FOR_EACH_LOOP (loop, LI_INCLUDE_ROOT)
      flow_loop_dump (loop, file, loop_dump_aux, verbose);

    flow_loops_cfg_dump (file);
/* Free data allocated for LOOP.  */

flow_loop_free (struct loop *loop)
  struct loop_exit *exit, *next;

  vec_free (loop->superloops);

  /* Break the list of the loop exit records.  They will be freed when the
     corresponding edge is rescanned or removed, and this avoids
     accessing the (already released) head of the list stored in the
     loop structure.  */
  for (exit = loop->exits->next; exit != loop->exits; exit = next)

  ggc_free (loop->exits);
/* Free all the memory allocated for LOOPS.  */

flow_loops_free (struct loops *loops)
      /* Free the loop descriptors.  */
      FOR_EACH_VEC_SAFE_ELT (loops->larray, i, loop)
          flow_loop_free (loop);

      vec_free (loops->larray);
/* Find the nodes contained within the LOOP with header HEADER.
   Return the number of nodes within the loop.  */

flow_loop_nodes_find (basic_block header, struct loop *loop)
  vec<basic_block> stack = vNULL;
  edge_iterator latch_ei;

  header->loop_father = loop;

  FOR_EACH_EDGE (latch, latch_ei, loop->header->preds)
      if (latch->src->loop_father == loop
          || !dominated_by_p (CDI_DOMINATORS, latch->src, loop->header))

      stack.safe_push (latch->src);
      latch->src->loop_father = loop;

  while (!stack.is_empty ())
      FOR_EACH_EDGE (e, ei, node->preds)
          basic_block ancestor = e->src;

          if (ancestor->loop_father != loop)
              ancestor->loop_father = loop;
              stack.safe_push (ancestor);
/* Records the vector of superloops of the loop LOOP, whose immediate
   superloop is FATHER.  */

establish_preds (struct loop *loop, struct loop *father)
  unsigned depth = loop_depth (father) + 1;

  loop->superloops = 0;
  vec_alloc (loop->superloops, depth);
  FOR_EACH_VEC_SAFE_ELT (father->superloops, i, ploop)
    loop->superloops->quick_push (ploop);
  loop->superloops->quick_push (father);

  for (ploop = loop->inner; ploop; ploop = ploop->next)
    establish_preds (ploop, loop);
/* Add LOOP to the loop hierarchy tree where FATHER is the father of the
   added loop.  If LOOP has some children, take care that their
   pred fields are initialized correctly.  If AFTER is non-null,
   it is expected to be a pointer into FATHER's inner sibling
   list, and LOOP is added behind AFTER; otherwise it is added in front
   of FATHER's siblings.  */

flow_loop_tree_node_add (struct loop *father, struct loop *loop,

      loop->next = after->next;

      loop->next = father->inner;
      father->inner = loop;

  establish_preds (loop, father);
/* Remove LOOP from the loop hierarchy tree.  */

flow_loop_tree_node_remove (struct loop *loop)
  struct loop *prev, *father;

  father = loop_outer (loop);

  /* Remove loop from the list of sons.  */
  if (father->inner == loop)
    father->inner = loop->next;

      for (prev = father->inner; prev->next != loop; prev = prev->next)
      prev->next = loop->next;

  loop->superloops = NULL;
/* Allocates and returns new loop structure.  */

  struct loop *loop = ggc_cleared_alloc<struct loop> ();

  loop->exits = ggc_cleared_alloc<loop_exit> ();
  loop->exits->next = loop->exits->prev = loop->exits;
  loop->can_be_parallel = false;
  loop->constraints = 0;
  loop->nb_iterations_upper_bound = 0;
  loop->nb_iterations_likely_upper_bound = 0;
  loop->nb_iterations_estimate = 0;
/* Initializes loops structure LOOPS, reserving place for NUM_LOOPS loops
   (including the root of the loop tree).  */

init_loops_structure (struct function *fn,
                      struct loops *loops, unsigned num_loops)
  memset (loops, 0, sizeof *loops);
  vec_alloc (loops->larray, num_loops);

  /* Dummy loop containing whole function.  */
  root = alloc_loop ();
  root->num_nodes = n_basic_blocks_for_fn (fn);
  root->latch = EXIT_BLOCK_PTR_FOR_FN (fn);
  root->header = ENTRY_BLOCK_PTR_FOR_FN (fn);
  ENTRY_BLOCK_PTR_FOR_FN (fn)->loop_father = root;
  EXIT_BLOCK_PTR_FOR_FN (fn)->loop_father = root;

  loops->larray->quick_push (root);
  loops->tree_root = root;
/* Returns whether HEADER is a loop header.  */

bb_loop_header_p (basic_block header)
  /* If we have an abnormal predecessor, do not consider the
     loop (not worth the problems).  */
  if (bb_has_abnormal_pred (header))

  /* Look for back edges where a predecessor is dominated
     by this block.  A natural loop has a single entry
     node (header) that dominates all the nodes in the
     loop.  It also has a single back edge to the header
     from a latch node.  */
  FOR_EACH_EDGE (e, ei, header->preds)
      basic_block latch = e->src;
      if (latch != ENTRY_BLOCK_PTR_FOR_FN (cfun)
          && dominated_by_p (CDI_DOMINATORS, latch, header))
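
/* Illustrative sketch (editor's addition): bb_loop_header_p can be used to
   count natural-loop headers in the current function; the helper name is
   hypothetical, and dominators must already be computed, as in
   flow_loops_find below.  */
#if 0
static unsigned
count_loop_headers_example (void)
{
  basic_block bb;
  unsigned n = 0;

  FOR_EACH_BB_FN (bb, cfun)
    if (bb_loop_header_p (bb))
      n++;
  return n;
}
#endif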
/* Find all the natural loops in the function and save in LOOPS structure and
   recalculate loop_father information in basic block structures.
   If LOOPS is non-NULL then the loop structures for already recorded loops
   will be re-used and their number will not change.  We assume that no
   stale loops exist in LOOPS.
   When LOOPS is NULL it is allocated and re-built from scratch.
   Return the built LOOPS structure.  */

flow_loops_find (struct loops *loops)
  bool from_scratch = (loops == NULL);

  /* Ensure that the dominators are computed.  */
  calculate_dominance_info (CDI_DOMINATORS);

      loops = ggc_cleared_alloc<struct loops> ();
      init_loops_structure (cfun, loops, 1);

      /* Ensure that loop exits were released.  */
      gcc_assert (loops->exits == NULL);

  /* Taking care of this degenerate case makes the rest of
     this code simpler.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)

  /* The root loop node contains all basic-blocks.  */
  loops->tree_root->num_nodes = n_basic_blocks_for_fn (cfun);

  /* Compute depth first search order of the CFG so that outer
     natural loops will be found before inner natural loops.  */
  rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  pre_and_rev_post_order_compute (NULL, rc_order, false);

  /* Gather all loop headers in reverse completion order and allocate
     loop structures for loops that are not already present.  */
  auto_vec<loop_p> larray (loops->larray->length ());
  for (b = 0; b < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; b++)
      basic_block header = BASIC_BLOCK_FOR_FN (cfun, rc_order[b]);
      if (bb_loop_header_p (header))
          /* The current active loop tree has valid loop-fathers for
             header blocks.  */
              && header->loop_father->header == header)
              loop = header->loop_father;
              /* If we found an existing loop remove it from the
                 loop tree.  It is going to be inserted again
                 below.  */
              flow_loop_tree_node_remove (loop);

              /* Otherwise allocate a new loop structure for the loop.  */
              loop = alloc_loop ();
              /* ??? We could re-use unused loop slots here.  */
              loop->num = loops->larray->length ();
              vec_safe_push (loops->larray, loop);
              loop->header = header;

              && dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "flow_loops_find: discovered new "
                         "loop %d with header %d\n",
                         loop->num, header->index);

          /* Reset latch, we recompute it below.  */
          larray.safe_push (loop);

      /* Make blocks part of the loop root node at start.  */
      header->loop_father = loops->tree_root;

  /* Now iterate over the loops found, insert them into the loop tree
     and assign basic-block ownership.  */
  for (i = 0; i < larray.length (); ++i)
      struct loop *loop = larray[i];
      basic_block header = loop->header;

      flow_loop_tree_node_add (header->loop_father, loop);
      loop->num_nodes = flow_loop_nodes_find (loop->header, loop);

      /* Look for the latch for this header block, if it has just a
         single one.  */
      FOR_EACH_EDGE (e, ei, header->preds)
          basic_block latch = e->src;

          if (flow_bb_inside_loop_p (loop, latch))
              if (loop->latch != NULL)
                  /* More than one latch edge.  */
/* qsort helper for sort_sibling_loops.  */

static int *sort_sibling_loops_cmp_rpo;

sort_sibling_loops_cmp (const void *la_, const void *lb_)
  const struct loop *la = *(const struct loop * const *)la_;
  const struct loop *lb = *(const struct loop * const *)lb_;
  return (sort_sibling_loops_cmp_rpo[la->header->index]
          - sort_sibling_loops_cmp_rpo[lb->header->index]);
/* Sort sibling loops in RPO order.  */

sort_sibling_loops (function *fn)
  /* Match flow_loops_find in the order we sort sibling loops.  */
  sort_sibling_loops_cmp_rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  int *rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  pre_and_rev_post_order_compute_fn (fn, NULL, rc_order, false);
  for (int i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; ++i)
    sort_sibling_loops_cmp_rpo[rc_order[i]] = i;

  auto_vec<loop_p, 3> siblings;
  FOR_EACH_LOOP_FN (fn, loop, LI_INCLUDE_ROOT)
    if (loop->inner && loop->inner->next)
        loop_p sibling = loop->inner;
            siblings.safe_push (sibling);
            sibling = sibling->next;
        siblings.qsort (sort_sibling_loops_cmp);
        loop_p *siblingp = &loop->inner;
        for (unsigned i = 0; i < siblings.length (); ++i)
            *siblingp = siblings[i];
            siblingp = &(*siblingp)->next;
        siblings.truncate (0);

  free (sort_sibling_loops_cmp_rpo);
  sort_sibling_loops_cmp_rpo = NULL;
/* Ratio of frequencies of edges so that one of several latch edges is
   considered to belong to an inner loop with the same header.  */
#define HEAVY_EDGE_RATIO 8

/* Minimum number of samples for which we apply
   find_subloop_latch_edge_by_profile heuristics.  */
#define HEAVY_EDGE_MIN_SAMPLES 10

/* If the profile info is available, finds an edge in LATCHES that is much more
   frequent than the remaining edges.  Returns such an edge, or NULL if we do
   not find one.

   We do not use guessed profile here, only the measured one.  The guessed
   profile is usually too flat and unreliable for this (and it is mostly based
   on the loop structure of the program, so it does not make much sense to
   derive the loop structure from it).  */

find_subloop_latch_edge_by_profile (vec<edge> latches)
  profile_count mcount = profile_count::zero (), tcount = profile_count::zero ();

  FOR_EACH_VEC_ELT (latches, i, e)
      if (e->count () > mcount)
      tcount += e->count ();

  if (!tcount.initialized_p () || !(tcount.ipa () > HEAVY_EDGE_MIN_SAMPLES)
      || (tcount - mcount).apply_scale (HEAVY_EDGE_RATIO, 1) > tcount)

             "Found latch edge %d -> %d using profile information.\n",
             me->src->index, me->dest->index);
/* Among LATCHES, guesses a latch edge of LOOP corresponding to a subloop, based
   on the structure of induction variables.  Returns this edge, or NULL if we
   do not find any.

   We are quite conservative, and look just for an obvious simple innermost
   loop (which is the case where we would lose the most performance by not
   disambiguating the loop).  More precisely, we look for the following
   situation: The source of the chosen latch edge dominates sources of all
   the other latch edges.  Additionally, the header does not contain a phi node
   such that the argument from the chosen edge is equal to the argument from
   another latch edge.  */

find_subloop_latch_edge_by_ivs (struct loop *loop ATTRIBUTE_UNUSED, vec<edge> latches)
  edge e, latch = latches[0];

  /* Find the candidate for the latch edge.  */
  for (i = 1; latches.iterate (i, &e); i++)
    if (dominated_by_p (CDI_DOMINATORS, latch->src, e->src))

  /* Verify that it dominates all the latch edges.  */
  FOR_EACH_VEC_ELT (latches, i, e)
    if (!dominated_by_p (CDI_DOMINATORS, e->src, latch->src))

  /* Check for a phi node that would deny that this is a latch edge of
     a subloop.  */
  for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
      lop = PHI_ARG_DEF_FROM_EDGE (phi, latch);

      /* Ignore the values that are not changed inside the subloop.  */
      if (TREE_CODE (lop) != SSA_NAME
          || SSA_NAME_DEF_STMT (lop) == phi)
      bb = gimple_bb (SSA_NAME_DEF_STMT (lop));
      if (!bb || !flow_bb_inside_loop_p (loop, bb))

      FOR_EACH_VEC_ELT (latches, i, e)
          && PHI_ARG_DEF_FROM_EDGE (phi, e) == lop)

             "Found latch edge %d -> %d using iv structure.\n",
             latch->src->index, latch->dest->index);
/* If we can determine that one of the several latch edges of LOOP behaves
   as a latch edge of a separate subloop, returns this edge.  Otherwise
   returns NULL.  */

find_subloop_latch_edge (struct loop *loop)
  vec<edge> latches = get_loop_latch_edges (loop);

  if (latches.length () > 1)
      latch = find_subloop_latch_edge_by_profile (latches);

          /* We consider ivs to guess the latch edge only in SSA.  Perhaps we
             should use cfghook for this, but it is hard to imagine it would
             be useful elsewhere.  */
          && current_ir_type () == IR_GIMPLE)
        latch = find_subloop_latch_edge_by_ivs (loop, latches);
/* Callback for make_forwarder_block.  Returns true if the edge E is marked
   in the set MFB_REIS_SET.  */

static hash_set<edge> *mfb_reis_set;

mfb_redirect_edges_in_set (edge e)
  return mfb_reis_set->contains (e);
/* Creates a subloop of LOOP with latch edge LATCH.  */

form_subloop (struct loop *loop, edge latch)
  struct loop *new_loop;

  mfb_reis_set = new hash_set<edge>;
  FOR_EACH_EDGE (e, ei, loop->header->preds)
      mfb_reis_set->add (e);
  new_entry = make_forwarder_block (loop->header, mfb_redirect_edges_in_set,

  loop->header = new_entry->src;

  /* Find the blocks and subloops that belong to the new loop, and add it to
     the appropriate place in the loop tree.  */
  new_loop = alloc_loop ();
  new_loop->header = new_entry->dest;
  new_loop->latch = latch->src;
  add_loop (new_loop, loop);
/* Make all the latch edges of LOOP go to a single forwarder block --
   a new latch of LOOP.  */

merge_latch_edges (struct loop *loop)
  vec<edge> latches = get_loop_latch_edges (loop);

  gcc_assert (latches.length () > 0);

  if (latches.length () == 1)
    loop->latch = latches[0]->src;

        fprintf (dump_file, "Merged latch edges of loop %d\n", loop->num);

      mfb_reis_set = new hash_set<edge>;
      FOR_EACH_VEC_ELT (latches, i, e)
        mfb_reis_set->add (e);
      latch = make_forwarder_block (loop->header, mfb_redirect_edges_in_set,

      loop->header = latch->dest;
      loop->latch = latch->src;
/* LOOP may have several latch edges.  Transform it into (possibly several)
   loops with single latch edge.  */

disambiguate_multiple_latches (struct loop *loop)
  /* We eliminate the multiple latches by splitting the header to the forwarder
     block F and the rest R, and redirecting the edges.  There are two cases:

     1) If there is a latch edge E that corresponds to a subloop (we guess
        that based on profile -- if it is taken much more often than the
        remaining edges; and on trees, using the information about induction
        variables of the loops), we redirect E to R, all the remaining edges to
        F, then rescan the loops and try again for the outer loop.
     2) If there is no such edge, we redirect all latch edges to F, and the
        entry edges to R, thus making F the single latch of the loop.  */

    fprintf (dump_file, "Disambiguating loop %d with multiple latches\n",

  /* During latch merging, we may need to redirect the entry edges to a new
     block.  This would cause problems if the entry edge was the one from the
     entry block.  To avoid having to handle this case specially, split
     such an entry edge.  */
  e = find_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), loop->header);

  e = find_subloop_latch_edge (loop);
      form_subloop (loop, e);
    merge_latch_edges (loop);
/* Split loops with multiple latch edges.  */

disambiguate_loops_with_multiple_latches (void)
  FOR_EACH_LOOP (loop, 0)
      disambiguate_multiple_latches (loop);
/* Return nonzero if basic block BB belongs to LOOP.  */

flow_bb_inside_loop_p (const struct loop *loop, const_basic_block bb)
  struct loop *source_loop;

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))

  source_loop = bb->loop_father;
  return loop == source_loop || flow_loop_nested_p (loop, source_loop);
/* Enumeration predicate for get_loop_body_with_size.  */

glb_enum_p (const_basic_block bb, const void *glb_loop)
  const struct loop *const loop = (const struct loop *) glb_loop;
  return (bb != loop->header
          && dominated_by_p (CDI_DOMINATORS, bb, loop->header));
/* Gets basic blocks of a LOOP.  Header is the 0-th block, rest is in dfs
   order against direction of edges from latch.  Specially, if
   header != latch, latch is the 1-st block.  LOOP cannot be the fake
   loop tree root, and its size must be at most MAX_SIZE.  The blocks
   in the LOOP body are stored to BODY, and the size of the LOOP is
   returned.  */

get_loop_body_with_size (const struct loop *loop, basic_block *body,
  return dfs_enumerate_from (loop->header, 1, glb_enum_p,
                             body, max_size, loop);
/* Gets basic blocks of a LOOP.  Header is the 0-th block, rest is in dfs
   order against direction of edges from latch.  Specially, if
   header != latch, latch is the 1-st block.  */

get_loop_body (const struct loop *loop)
  basic_block *body, bb;

  gcc_assert (loop->num_nodes);

  body = XNEWVEC (basic_block, loop->num_nodes);

  if (loop->latch == EXIT_BLOCK_PTR_FOR_FN (cfun))
      /* There may be blocks unreachable from EXIT_BLOCK, hence we need to
         special-case the fake loop that contains the whole function.  */
      gcc_assert (loop->num_nodes == (unsigned) n_basic_blocks_for_fn (cfun));
      body[tv++] = loop->header;
      body[tv++] = EXIT_BLOCK_PTR_FOR_FN (cfun);
      FOR_EACH_BB_FN (bb, cfun)

    tv = get_loop_body_with_size (loop, body, loop->num_nodes);

  gcc_assert (tv == loop->num_nodes);
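
/* Hedged usage sketch (editor's addition): get_loop_body returns an
   xmalloc'ed array of loop->num_nodes blocks that the caller must free;
   the helper name is an assumption.  */
#if 0
static unsigned
sum_block_indices_example (const struct loop *loop)
{
  basic_block *body = get_loop_body (loop);
  unsigned i, sum = 0;

  for (i = 0; i < loop->num_nodes; i++)
    sum += body[i]->index;
  free (body);
  return sum;
}
#endif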
/* Fills dominance descendants inside LOOP of the basic block BB into
   array TOVISIT from index *TV.  */

fill_sons_in_loop (const struct loop *loop, basic_block bb,
                   basic_block *tovisit, int *tv)
  basic_block son, postpone = NULL;

  tovisit[(*tv)++] = bb;
  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son = next_dom_son (CDI_DOMINATORS, son))
      if (!flow_bb_inside_loop_p (loop, son))

      if (dominated_by_p (CDI_DOMINATORS, loop->latch, son))

      fill_sons_in_loop (loop, son, tovisit, tv);

    fill_sons_in_loop (loop, postpone, tovisit, tv);
/* Gets body of a LOOP (that must be different from the outermost loop)
   sorted by dominance relation.  Additionally, if a basic block s dominates
   the latch, then only blocks dominated by s are after it.  */

get_loop_body_in_dom_order (const struct loop *loop)
  basic_block *tovisit;

  gcc_assert (loop->num_nodes);

  tovisit = XNEWVEC (basic_block, loop->num_nodes);

  gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));

  fill_sons_in_loop (loop, loop->header, tovisit, &tv);

  gcc_assert (tv == (int) loop->num_nodes);
/* Gets body of a LOOP sorted via provided BB_COMPARATOR.  */

get_loop_body_in_custom_order (const struct loop *loop,
                               int (*bb_comparator) (const void *, const void *))
  basic_block *bbs = get_loop_body (loop);

  qsort (bbs, loop->num_nodes, sizeof (basic_block), bb_comparator);
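
/* Illustrative sketch (editor's addition): a BB_COMPARATOR passed to
   get_loop_body_in_custom_order just orders the qsort'ed basic_block
   pointers, here by block index.  The comparator name is hypothetical.  */
#if 0
static int
bb_index_cmp_example (const void *a_, const void *b_)
{
  const_basic_block a = *(const basic_block *) a_;
  const_basic_block b = *(const basic_block *) b_;
  return a->index - b->index;
}
#endif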
/* Get body of a LOOP in breadth first sort order.  */

get_loop_body_in_bfs_order (const struct loop *loop)
  gcc_assert (loop->num_nodes);
  gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));

  blocks = XNEWVEC (basic_block, loop->num_nodes);
  blocks[0] = loop->header;
  bitmap_set_bit (visited, loop->header->index);
  while (i < loop->num_nodes)
      gcc_assert (i > vc);

      FOR_EACH_EDGE (e, ei, bb->succs)
          if (flow_bb_inside_loop_p (loop, e->dest))
              /* This bb is now visited.  */
              if (bitmap_set_bit (visited, e->dest->index))
                blocks[i++] = e->dest;
/* Hash function for struct loop_exit.  */

loop_exit_hasher::hash (loop_exit *exit)
  return htab_hash_pointer (exit->e);

/* Equality function for struct loop_exit.  Compares with edge.  */

loop_exit_hasher::equal (loop_exit *exit, edge e)
  return exit->e == e;

/* Frees the list of loop exit descriptions EX.  */

loop_exit_hasher::remove (loop_exit *exit)
  for (; exit; exit = next)
      next = exit->next_e;
      exit->next->prev = exit->prev;
      exit->prev->next = exit->next;
/* Returns the list of records for E as an exit of a loop.  */

static struct loop_exit *
get_exit_descriptions (edge e)
  return current_loops->exits->find_with_hash (e, htab_hash_pointer (e));
/* Updates the lists of loop exits in which E appears.
   If REMOVED is true, E is being removed, and we
   just remove it from the lists of exits.
   If NEW_EDGE is true and E is not a loop exit, we
   do not try to remove it from loop exit lists.  */

rescan_loop_exit (edge e, bool new_edge, bool removed)
  struct loop_exit *exits = NULL, *exit;
  struct loop *aloop, *cloop;

  if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))

      && e->src->loop_father != NULL
      && e->dest->loop_father != NULL
      && !flow_bb_inside_loop_p (e->src->loop_father, e->dest))
      cloop = find_common_loop (e->src->loop_father, e->dest->loop_father);
      for (aloop = e->src->loop_father;
           aloop = loop_outer (aloop))
          exit = ggc_alloc<loop_exit> ();

          exit->next = aloop->exits->next;
          exit->prev = aloop->exits;
          exit->next->prev = exit;
          exit->prev->next = exit;

          exit->next_e = exits;

  if (!exits && new_edge)

    = current_loops->exits->find_slot_with_hash (e, htab_hash_pointer (e),
                                                 exits ? INSERT : NO_INSERT);

      loop_exit_hasher::remove (*slot);

        current_loops->exits->clear_slot (slot);
/* For each loop, record the list of exit edges, and start maintaining these
   lists.  */

record_loop_exits (void)
  if (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
  loops_state_set (LOOPS_HAVE_RECORDED_EXITS);

  gcc_assert (current_loops->exits == NULL);
  current_loops->exits
    = hash_table<loop_exit_hasher>::create_ggc (2 * number_of_loops (cfun));

  FOR_EACH_BB_FN (bb, cfun)
      FOR_EACH_EDGE (e, ei, bb->succs)
          rescan_loop_exit (e, true, false);
/* Dumps information about the exit in *SLOT to FILE.
   Callback for htab_traverse.  */

dump_recorded_exit (loop_exit **slot, FILE *file)
  struct loop_exit *exit = *slot;

  for (; exit != NULL; exit = exit->next_e)

  fprintf (file, "Edge %d->%d exits %u loops\n",
           e->src->index, e->dest->index, n);
/* Dumps the recorded exits of loops to FILE.  */

extern void dump_recorded_exits (FILE *);
dump_recorded_exits (FILE *file)
  if (!current_loops->exits)
  current_loops->exits->traverse<FILE *, dump_recorded_exit> (file);
/* Releases lists of loop exits.  */

release_recorded_exits (function *fn)
  gcc_assert (loops_state_satisfies_p (fn, LOOPS_HAVE_RECORDED_EXITS));
  loops_for_fn (fn)->exits->empty ();
  loops_for_fn (fn)->exits = NULL;
  loops_state_clear (fn, LOOPS_HAVE_RECORDED_EXITS);
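
/* Hedged sketch (editor's addition) of the recorded-exits life cycle as the
   functions above use it: record the lists, query them, then release them
   before transformations that would invalidate them.  The wrapper name is
   an assumption.  */
#if 0
static void
with_recorded_exits_example (function *fn)
{
  record_loop_exits ();
  /* ... passes may now use get_loop_exit_edges / single_exit cheaply ... */
  release_recorded_exits (fn);
}
#endif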
/* Returns the list of the exit edges of a LOOP.  */

get_loop_exit_edges (const struct loop *loop)
  vec<edge> edges = vNULL;
  struct loop_exit *exit;

  gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* If we maintain the lists of exits, use them.  Otherwise we must
     scan the body of the loop.  */
  if (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
      for (exit = loop->exits->next; exit->e; exit = exit->next)
        edges.safe_push (exit->e);

      body = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
        FOR_EACH_EDGE (e, ei, body[i]->succs)
            if (!flow_bb_inside_loop_p (loop, e->dest))
              edges.safe_push (e);
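
/* Hedged usage sketch (editor's addition): the vec returned by
   get_loop_exit_edges must be released by the caller; the helper name is
   hypothetical.  */
#if 0
static unsigned
count_exits_example (const struct loop *loop)
{
  vec<edge> exits = get_loop_exit_edges (loop);
  unsigned n = exits.length ();
  exits.release ();
  return n;
}
#endif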
/* Counts the number of conditional branches inside LOOP.  */

num_loop_branches (const struct loop *loop)
  gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));

  body = get_loop_body (loop);
  for (i = 0; i < loop->num_nodes; i++)
    if (EDGE_COUNT (body[i]->succs) >= 2)
/* Adds basic block BB to LOOP.  */

add_bb_to_loop (basic_block bb, struct loop *loop)
  gcc_assert (bb->loop_father == NULL);
  bb->loop_father = loop;
  FOR_EACH_VEC_SAFE_ELT (loop->superloops, i, ploop)

  FOR_EACH_EDGE (e, ei, bb->succs)
      rescan_loop_exit (e, true, false);
  FOR_EACH_EDGE (e, ei, bb->preds)
      rescan_loop_exit (e, true, false);
/* Remove basic block BB from loops.  */

remove_bb_from_loops (basic_block bb)
  struct loop *loop = bb->loop_father;

  gcc_assert (loop != NULL);
  FOR_EACH_VEC_SAFE_ELT (loop->superloops, i, ploop)
  bb->loop_father = NULL;

  FOR_EACH_EDGE (e, ei, bb->succs)
      rescan_loop_exit (e, false, true);
  FOR_EACH_EDGE (e, ei, bb->preds)
      rescan_loop_exit (e, false, true);
/* Finds nearest common ancestor in loop tree for given loops.  */

find_common_loop (struct loop *loop_s, struct loop *loop_d)
  unsigned sdepth, ddepth;

  if (!loop_s) return loop_d;
  if (!loop_d) return loop_s;

  sdepth = loop_depth (loop_s);
  ddepth = loop_depth (loop_d);

  if (sdepth < ddepth)
    loop_d = (*loop_d->superloops)[sdepth];
  else if (sdepth > ddepth)
    loop_s = (*loop_s->superloops)[ddepth];

  while (loop_s != loop_d)
      loop_s = loop_outer (loop_s);
      loop_d = loop_outer (loop_d);
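
/* Illustrative sketch (editor's addition): find_common_loop gives the
   deepest loop containing two loops, which is how rescan_loop_exit above
   decides which loops an edge exits.  The helper name is hypothetical.  */
#if 0
static struct loop *
common_loop_of_blocks_example (basic_block a, basic_block b)
{
  return find_common_loop (a->loop_father, b->loop_father);
}
#endif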
/* Removes LOOP from structures and frees its data.  */

delete_loop (struct loop *loop)
  /* Remove the loop from structure.  */
  flow_loop_tree_node_remove (loop);

  /* Remove loop from loops array.  */
  (*current_loops->larray)[loop->num] = NULL;

  /* Free loop data.  */
  flow_loop_free (loop);
/* Cancels the LOOP; it must be the innermost one.  */

cancel_loop (struct loop *loop)
  struct loop *outer = loop_outer (loop);

  gcc_assert (!loop->inner);

  /* Move blocks up one level (they should be removed as soon as possible).  */
  bbs = get_loop_body (loop);
  for (i = 0; i < loop->num_nodes; i++)
    bbs[i]->loop_father = outer;
/* Cancels LOOP and all its subloops.  */

cancel_loop_tree (struct loop *loop)
    cancel_loop_tree (loop->inner);
/* Checks that information about loops is correct
     -- sizes of loops are all right
     -- results of get_loop_body really belong to the loop
     -- loop headers have just a single entry edge and a single latch edge
     -- loop latches have only a single successor that is the header of their loop
     -- irreducible loops are correctly marked
     -- the cached loop depth and loop father of each bb is correct  */
verify_loop_structure (void)
  unsigned *sizes, i, j;
  basic_block bb, *bbs;
  unsigned num = number_of_loops (cfun);
  struct loop_exit *exit, *mexit;
  bool dom_available = dom_info_available_p (CDI_DOMINATORS);

  if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
      error ("loop verification on loop tree that needs fixup");

  /* We need up-to-date dominators, compute or verify them.  */
    calculate_dominance_info (CDI_DOMINATORS);
    verify_dominators (CDI_DOMINATORS);

  /* Check the loop tree root.  */
  if (current_loops->tree_root->header != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || current_loops->tree_root->latch != EXIT_BLOCK_PTR_FOR_FN (cfun)
      || (current_loops->tree_root->num_nodes
          != (unsigned) n_basic_blocks_for_fn (cfun)))
      error ("corrupt loop tree root");

  /* Check the headers.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (bb_loop_header_p (bb))
        if (bb->loop_father->header == NULL)
            error ("loop with header %d marked for removal", bb->index);
        else if (bb->loop_father->header != bb)
            error ("loop with header %d not in loop tree", bb->index);
    else if (bb->loop_father->header == bb)
        error ("non-loop with header %d not marked for removal", bb->index);

  /* Check the recorded loop father and sizes of loops.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
      if (loop->header == NULL)
          error ("removed loop %d in loop tree", loop->num);

      n = get_loop_body_with_size (loop, bbs, n_basic_blocks_for_fn (cfun));
      if (loop->num_nodes != n)
          error ("size of loop %d should be %d, not %d",
                 loop->num, n, loop->num_nodes);

      for (j = 0; j < n; j++)
          if (!flow_bb_inside_loop_p (loop, bb))
              error ("bb %d does not belong to loop %d",
                     bb->index, loop->num);

          /* Ignore this block if it is in an inner loop.  */
          if (bitmap_bit_p (visited, bb->index))
          bitmap_set_bit (visited, bb->index);

          if (bb->loop_father != loop)
              error ("bb %d has father loop %d, should be loop %d",
                     bb->index, bb->loop_father->num, loop->num);
  /* Check headers and latches.  */
  FOR_EACH_LOOP (loop, 0)
      if (loop->header == NULL)
      if (!bb_loop_header_p (loop->header))
          error ("loop %d%'s header is not a loop header", i);
      if (loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS)
          && EDGE_COUNT (loop->header->preds) != 2)
          error ("loop %d%'s header does not have exactly 2 entries", i);
          if (!find_edge (loop->latch, loop->header))
              error ("loop %d%'s latch does not have an edge to its header", i);
          if (!dominated_by_p (CDI_DOMINATORS, loop->latch, loop->header))
              error ("loop %d%'s latch is not dominated by its header", i);
      if (loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES))
          if (!single_succ_p (loop->latch))
              error ("loop %d%'s latch does not have exactly 1 successor", i);
          if (single_succ (loop->latch) != loop->header)
              error ("loop %d%'s latch does not have header as successor", i);
          if (loop->latch->loop_father != loop)
              error ("loop %d%'s latch does not belong directly to it", i);
      if (loop->header->loop_father != loop)
          error ("loop %d%'s header does not belong directly to it", i);
      if (loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS)
          && (loop_latch_edge (loop)->flags & EDGE_IRREDUCIBLE_LOOP))
          error ("loop %d%'s latch is marked as part of irreducible region", i);
  /* Check irreducible loops.  */
  if (loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS))
      /* Record old info.  */
      auto_sbitmap irreds (last_basic_block_for_fn (cfun));
      FOR_EACH_BB_FN (bb, cfun)
          if (bb->flags & BB_IRREDUCIBLE_LOOP)
            bitmap_set_bit (irreds, bb->index);
            bitmap_clear_bit (irreds, bb->index);
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (e->flags & EDGE_IRREDUCIBLE_LOOP)
              e->flags |= EDGE_ALL_FLAGS + 1;

      mark_irreducible_loops ();

      FOR_EACH_BB_FN (bb, cfun)
          if ((bb->flags & BB_IRREDUCIBLE_LOOP)
              && !bitmap_bit_p (irreds, bb->index))
              error ("basic block %d should be marked irreducible", bb->index);
          else if (!(bb->flags & BB_IRREDUCIBLE_LOOP)
                   && bitmap_bit_p (irreds, bb->index))
              error ("basic block %d should not be marked irreducible", bb->index);
          FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_IRREDUCIBLE_LOOP)
                  && !(e->flags & (EDGE_ALL_FLAGS + 1)))
                  error ("edge from %d to %d should be marked irreducible",
                         e->src->index, e->dest->index);
              else if (!(e->flags & EDGE_IRREDUCIBLE_LOOP)
                       && (e->flags & (EDGE_ALL_FLAGS + 1)))
                  error ("edge from %d to %d should not be marked irreducible",
                         e->src->index, e->dest->index);
              e->flags &= ~(EDGE_ALL_FLAGS + 1);
  /* Check the recorded loop exits.  */
  FOR_EACH_LOOP (loop, 0)
      if (!loop->exits || loop->exits->e != NULL)
          error ("corrupted head of the exits list of loop %d",

          /* Check that the list forms a cycle, and all elements except
             for the head are nonnull.  */
          for (mexit = loop->exits, exit = mexit->next, i = 0;
               exit->e && exit != mexit;
                mexit = mexit->next;

          if (exit != loop->exits)
              error ("corrupted exits list of loop %d", loop->num);

      if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
          if (loop->exits->next != loop->exits)
              error ("nonempty exits list of loop %d, but exits are not recorded",

  if (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
      unsigned n_exits = 0, eloops;

      sizes = XCNEWVEC (unsigned, num);
      memset (sizes, 0, sizeof (unsigned) * num);
      FOR_EACH_BB_FN (bb, cfun)
          if (bb->loop_father == current_loops->tree_root)
          FOR_EACH_EDGE (e, ei, bb->succs)
              if (flow_bb_inside_loop_p (bb->loop_father, e->dest))

              exit = get_exit_descriptions (e);
                  error ("exit %d->%d not recorded",
                         e->src->index, e->dest->index);

              for (; exit; exit = exit->next_e)

              for (loop = bb->loop_father;
                   loop != e->dest->loop_father
                   /* When a loop exit is also an entry edge which
                      can happen when avoiding CFG manipulations
                      then the last loop exited is the outer loop
                      of the loop entered.  */
                   && loop != loop_outer (e->dest->loop_father);
                   loop = loop_outer (loop))

                  error ("wrong list of exited loops for edge %d->%d",
                         e->src->index, e->dest->index);

      if (n_exits != current_loops->exits->elements ())
          error ("too many loop exits recorded");

      FOR_EACH_LOOP (loop, 0)
          for (exit = loop->exits->next; exit->e; exit = exit->next)
          if (eloops != sizes[loop->num])
            error ("%d exits recorded for loop %d (having %d exits)",
                   eloops, loop->num, sizes[loop->num]);

    free_dominance_info (CDI_DOMINATORS);
/* Returns latch edge of LOOP.  */

loop_latch_edge (const struct loop *loop)
  return find_edge (loop->latch, loop->header);
/* Returns preheader edge of LOOP.  */

loop_preheader_edge (const struct loop *loop)
  gcc_assert (loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS)
              && ! loops_state_satisfies_p (LOOPS_MAY_HAVE_MULTIPLE_LATCHES));

  FOR_EACH_EDGE (e, ei, loop->header->preds)
    if (e->src != loop->latch)

      gcc_assert (! loop_outer (loop));
      return single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
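
/* Hedged sketch (editor's addition): with preheaders and simple latches
   maintained, the preheader and latch blocks of a loop can be reached
   directly from these two edges.  The helper name is an assumption.  */
#if 0
static void
dump_preheader_and_latch_example (const struct loop *loop, FILE *file)
{
  edge pre = loop_preheader_edge (loop);
  edge latch = loop_latch_edge (loop);
  fprintf (file, "loop %d: preheader bb %d, latch bb %d\n",
           loop->num, pre->src->index, latch->src->index);
}
#endif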
/* Returns true if E is an exit of LOOP.  */

loop_exit_edge_p (const struct loop *loop, const_edge e)
  return (flow_bb_inside_loop_p (loop, e->src)
          && !flow_bb_inside_loop_p (loop, e->dest));
/* Returns the single exit edge of LOOP, or NULL if LOOP has either no exit
   or more than one exit.  If loops do not have the exits recorded, NULL
   is always returned.  */

single_exit (const struct loop *loop)
  struct loop_exit *exit = loop->exits->next;

  if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))

  if (exit->e && exit->next == loop->exits)
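
/* Hedged usage sketch (editor's addition): single_exit only answers when
   exits are recorded, so a typical caller checks for NULL; the helper name
   is hypothetical.  */
#if 0
static bool
has_single_exit_to_example (const struct loop *loop, basic_block bb)
{
  edge e = single_exit (loop);
  return e != NULL && e->dest == bb;
}
#endif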
/* Returns true when BB has an incoming edge exiting LOOP.  */

loop_exits_to_bb_p (struct loop *loop, basic_block bb)
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (loop_exit_edge_p (loop, e))
/* Returns true when BB has an outgoing edge exiting LOOP.  */

loop_exits_from_bb_p (struct loop *loop, basic_block bb)
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (loop_exit_edge_p (loop, e))
/* Return location corresponding to the loop control condition if possible.  */

get_loop_location (struct loop *loop)
  rtx_insn *insn = NULL;
  struct niter_desc *desc = NULL;

  /* For a for or while loop, we would like to return the location
     of the for or while statement, if possible.  To do this, look
     for the branch guarding the loop back-edge.  */

  /* If this is a simple loop with an in_edge, then the loop control
     branch is typically at the end of its source.  */
  desc = get_simple_loop_desc (loop);
      FOR_BB_INSNS_REVERSE (desc->in_edge->src, insn)
          if (INSN_P (insn) && INSN_HAS_LOCATION (insn))
            return INSN_LOCATION (insn);

  /* If loop has a single exit, then the loop control branch
     must be at the end of its source.  */
  if ((exit = single_exit (loop)))
      FOR_BB_INSNS_REVERSE (exit->src, insn)
          if (INSN_P (insn) && INSN_HAS_LOCATION (insn))
            return INSN_LOCATION (insn);

  /* Next check the latch, to see if it is non-empty.  */
  FOR_BB_INSNS_REVERSE (loop->latch, insn)
      if (INSN_P (insn) && INSN_HAS_LOCATION (insn))
        return INSN_LOCATION (insn);

  /* Finally, if none of the above identifies the loop control branch,
     return the first location in the loop header.  */
  FOR_BB_INSNS (loop->header, insn)
      if (INSN_P (insn) && INSN_HAS_LOCATION (insn))
        return INSN_LOCATION (insn);

  /* If all else fails, simply return the current function location.  */
  return DECL_SOURCE_LOCATION (current_function_decl);
/* Records that every statement in LOOP is executed I_BOUND times.
   REALISTIC is true if I_BOUND is expected to be close to the real number
   of iterations.  UPPER is true if we are sure the loop iterates at most
   I_BOUND times.  */

record_niter_bound (struct loop *loop, const widest_int &i_bound,
                    bool realistic, bool upper)
  /* Update the bounds only when there is no previous estimation, or when the
     current estimation is smaller.  */
      && (!loop->any_upper_bound
          || wi::ltu_p (i_bound, loop->nb_iterations_upper_bound)))
      loop->any_upper_bound = true;
      loop->nb_iterations_upper_bound = i_bound;
      if (!loop->any_likely_upper_bound)
          loop->any_likely_upper_bound = true;
          loop->nb_iterations_likely_upper_bound = i_bound;

      && (!loop->any_estimate
          || wi::ltu_p (i_bound, loop->nb_iterations_estimate)))
      loop->any_estimate = true;
      loop->nb_iterations_estimate = i_bound;

      && (!loop->any_likely_upper_bound
          || wi::ltu_p (i_bound, loop->nb_iterations_likely_upper_bound)))
      loop->any_likely_upper_bound = true;
      loop->nb_iterations_likely_upper_bound = i_bound;

  /* If an upper bound is smaller than the realistic estimate of the
     number of iterations, use the upper bound instead.  */
  if (loop->any_upper_bound
      && loop->any_estimate
      && wi::ltu_p (loop->nb_iterations_upper_bound,
                    loop->nb_iterations_estimate))
    loop->nb_iterations_estimate = loop->nb_iterations_upper_bound;
  if (loop->any_upper_bound
      && loop->any_likely_upper_bound
      && wi::ltu_p (loop->nb_iterations_upper_bound,
                    loop->nb_iterations_likely_upper_bound))
    loop->nb_iterations_likely_upper_bound = loop->nb_iterations_upper_bound;
/* Similar to get_estimated_loop_iterations, but returns the estimate only
   if it fits into HOST_WIDE_INT.  If this is not the case, or the estimate
   on the number of iterations of LOOP could not be derived, returns -1.  */

get_estimated_loop_iterations_int (struct loop *loop)
  HOST_WIDE_INT hwi_nit;

  if (!get_estimated_loop_iterations (loop, &nit))

  if (!wi::fits_shwi_p (nit))
  hwi_nit = nit.to_shwi ();

  return hwi_nit < 0 ? -1 : hwi_nit;
/* Returns an upper bound on the number of executions of statements
   in the LOOP.  For statements before the loop exit, this exceeds
   the number of executions of the latch by one.  */

max_stmt_executions_int (struct loop *loop)
  HOST_WIDE_INT nit = get_max_loop_iterations_int (loop);

  snit = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) nit + 1);

  /* If the computation overflows, return -1.  */
  return snit < 0 ? -1 : snit;
/* Returns a likely upper bound on the number of executions of statements
   in the LOOP.  For statements before the loop exit, this exceeds
   the number of executions of the latch by one.  */

likely_max_stmt_executions_int (struct loop *loop)
  HOST_WIDE_INT nit = get_likely_max_loop_iterations_int (loop);

  snit = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) nit + 1);

  /* If the computation overflows, return -1.  */
  return snit < 0 ? -1 : snit;
/* Sets NIT to the estimated number of executions of the latch of the
   LOOP.  If we have no reliable estimate, the function returns false, otherwise
   returns true.  */

get_estimated_loop_iterations (struct loop *loop, widest_int *nit)
  /* Even if the bound is not recorded, possibly we can derive one from
     profile.  */
  if (!loop->any_estimate)
      if (loop->header->count.reliable_p ())
          *nit = gcov_type_to_wide_int
                   (expected_loop_iterations_unbounded (loop) + 1);

  *nit = loop->nb_iterations_estimate;
/* Sets NIT to an upper bound for the maximum number of executions of the
   latch of the LOOP.  If we have no reliable estimate, the function returns
   false, otherwise returns true.  */

get_max_loop_iterations (const struct loop *loop, widest_int *nit)
  if (!loop->any_upper_bound)

  *nit = loop->nb_iterations_upper_bound;
/* Similar to get_max_loop_iterations, but returns the estimate only
   if it fits into HOST_WIDE_INT.  If this is not the case, or the estimate
   on the number of iterations of LOOP could not be derived, returns -1.  */

get_max_loop_iterations_int (const struct loop *loop)
  HOST_WIDE_INT hwi_nit;

  if (!get_max_loop_iterations (loop, &nit))

  if (!wi::fits_shwi_p (nit))
  hwi_nit = nit.to_shwi ();

  return hwi_nit < 0 ? -1 : hwi_nit;
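
/* Illustrative sketch (editor's addition): the *_int variants fold the
   "unknown" and "does not fit" cases into -1, so a caller can compare the
   result against a threshold directly.  The helper name and threshold
   parameter are assumptions.  */
#if 0
static bool
iterates_at_most_example (const struct loop *loop, HOST_WIDE_INT max)
{
  HOST_WIDE_INT nit = get_max_loop_iterations_int (loop);
  return nit >= 0 && nit <= max;
}
#endif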
/* Sets NIT to an upper bound for the maximum number of executions of the
   latch of the LOOP.  If we have no reliable estimate, the function returns
   false, otherwise returns true.  */

get_likely_max_loop_iterations (struct loop *loop, widest_int *nit)
  if (!loop->any_likely_upper_bound)

  *nit = loop->nb_iterations_likely_upper_bound;
/* Similar to get_likely_max_loop_iterations, but returns the estimate only
   if it fits into HOST_WIDE_INT.  If this is not the case, or the estimate
   on the number of iterations of LOOP could not be derived, returns -1.  */

get_likely_max_loop_iterations_int (struct loop *loop)
  HOST_WIDE_INT hwi_nit;

  if (!get_likely_max_loop_iterations (loop, &nit))

  if (!wi::fits_shwi_p (nit))
  hwi_nit = nit.to_shwi ();

  return hwi_nit < 0 ? -1 : hwi_nit;
/* Returns the loop depth of the loop BB belongs to.  */

bb_loop_depth (const_basic_block bb)
  return bb->loop_father ? loop_depth (bb->loop_father) : 0;
/* Marks LOOP for removal and sets LOOPS_NEED_FIXUP.  */

mark_loop_for_removal (loop_p loop)
  if (loop->header == NULL)

  loop->former_header = loop->header;
  loop->header = NULL;
  loops_state_set (LOOPS_NEED_FIXUP);