/* RTL-level loop invariant motion.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This implements the loop invariant motion pass.  It is very simple
   (no calls, libcalls, etc.).  This should be sufficient to clean up things
   like address arithmetic -- other more complicated invariants should be
   eliminated on the tree level either in tree-ssa-loop-im.c or in
   tree-ssa-pre.c.

   We proceed loop by loop -- it is simpler than trying to handle things
   globally and should not lose much.  First we inspect all sets inside the
   loop and create a dependency graph on insns (saying "to move this insn,
   you must also move the following insns").

   We then need to determine what to move.  We estimate the number of
   registers used and move as many invariants as possible while we still
   have enough free registers.  We prefer the expensive invariants.

   Then we move the selected invariants out of the loop, creating new
   temporaries for them if necessary.  */
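
/* For illustration (written in source terms for brevity; the pass itself
   works on the RTL representation of the loop), in a loop such as

     for (i = 0; i < n; i++)
       a[i] = x + y;

   the computation of x + y does not change inside the loop, so it is
   performed once in the loop preheader instead:

     tmp = x + y;
     for (i = 0; i < n; i++)
       a[i] = tmp;  */
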
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "expr.h"
#include "output.h"
#include "function.h"
#include "flags.h"
#include "df.h"

/* The data stored for the loop.  */

struct loop_data
{
  struct loop *outermost_exit;  /* The outermost exit of the loop.  */
  bool has_call;                /* True if the loop contains a call.  */
};

#define LOOP_DATA(LOOP) ((struct loop_data *) (LOOP)->aux)
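
/* The loop_data of a loop is allocated and filled in by find_exits and
   released by free_loop_data.  Because find_exits consults the loop_data
   of the subloops, move_loop_invariants processes the loop tree innermost
   first.  */
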
/* The description of a use.  */

struct use
{
  rtx *pos;                     /* Position of the use.  */
  rtx insn;                     /* The insn in which the use occurs.  */

  struct use *next;             /* Next use in the list.  */
};

/* The description of a def.  */

struct def
{
  struct use *uses;             /* The list of uses that are uniquely reached
                                   by it.  */
  unsigned n_uses;              /* Number of such uses.  */
  unsigned invno;               /* The corresponding invariant.  */
};

/* The data stored for each invariant.  */

struct invariant
{
  /* The number of the invariant.  */
  unsigned invno;

  /* Whether we already processed the invariant.  */
  bool processed;

  /* The definition of the invariant.  */
  struct def *def;

  /* The insn in which it is defined.  */
  rtx insn;

  /* Whether it is always executed.  */
  bool always_executed;

  /* Whether to move the invariant.  */
  bool move;

  /* Cost of the invariant.  */
  unsigned cost;

  /* The invariants it depends on.  */
  bitmap depends_on;

  /* Used for detecting already visited invariants during determining
     costs of movements.  */
  unsigned stamp;
};

/* The actual stamp for marking already visited invariants during determining
   costs of movements.  */

static unsigned actual_stamp;

typedef struct invariant *invariant_p;

DEF_VEC_P(invariant_p);
DEF_VEC_ALLOC_P(invariant_p, heap);

/* The invariants.  */

static VEC(invariant_p,heap) *invariants;

/* The dataflow object.  */

static struct df *df;

/* Test for possibility of invariantness of X.  */

static bool
check_maybe_invariant (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
      return true;

    case PC:
    case CC0:
    case UNSPEC_VOLATILE:
    case CALL:
      return false;

    case REG:
      return true;

    case MEM:
      /* Load/store motion is done elsewhere.  ??? Perhaps also add it here?
         It should not be hard, and might be faster than "elsewhere".  */

      /* Just handle the most trivial case where we load from an unchanging
         location (most importantly, pic tables).  */
      if (MEM_READONLY_P (x))
        break;

      return false;

    case ASM_OPERANDS:
      /* Don't mess with insns declared volatile.  */
      if (MEM_VOLATILE_P (x))
        return false;
      break;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (!check_maybe_invariant (XEXP (x, i)))
            return false;
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            if (!check_maybe_invariant (XVECEXP (x, i, j)))
              return false;
        }
    }

  return true;
}

/* Determines the basic blocks inside LOOP that are always executed and
   stores their bitmap to ALWAYS_REACHED.  MAY_EXIT is a bitmap of
   basic blocks that may either exit the loop, or contain the call that
   does not have to return.  BODY is the body of the loop obtained by
   get_loop_body_in_dom_order.  */

static void
compute_always_reached (struct loop *loop, basic_block *body,
                        bitmap may_exit, bitmap always_reached)
{
  unsigned i;

  for (i = 0; i < loop->num_nodes; i++)
    {
      if (dominated_by_p (CDI_DOMINATORS, loop->latch, body[i]))
        bitmap_set_bit (always_reached, i);

      if (bitmap_bit_p (may_exit, i))
        return;
    }
}

/* Finds exits out of the LOOP with body BODY.  Marks the blocks from which we
   may exit the loop by a cfg edge in HAS_EXIT and MAY_EXIT.  In MAY_EXIT we
   additionally mark the blocks that may exit due to a call.  */

static void
find_exits (struct loop *loop, basic_block *body,
            bitmap may_exit, bitmap has_exit)
{
  unsigned i;
  edge_iterator ei;
  edge e;
  struct loop *outermost_exit = loop, *aexit;
  bool has_call = false;
  rtx insn;

  for (i = 0; i < loop->num_nodes; i++)
    {
      if (body[i]->loop_father == loop)
        {
          FOR_BB_INSNS (body[i], insn)
            {
              if (CALL_P (insn)
                  && !CONST_OR_PURE_CALL_P (insn))
                {
                  has_call = true;
                  bitmap_set_bit (may_exit, i);
                  break;
                }
            }

          FOR_EACH_EDGE (e, ei, body[i]->succs)
            {
              if (flow_bb_inside_loop_p (loop, e->dest))
                continue;

              bitmap_set_bit (may_exit, i);
              bitmap_set_bit (has_exit, i);
              outermost_exit = find_common_loop (outermost_exit,
                                                 e->dest->loop_father);
            }
          continue;
        }

      /* Use the data stored for the subloop to decide whether we may exit
         through it.  It is sufficient to do this for the header of the loop,
         as other basic blocks inside it must be dominated by it.  */
      if (body[i]->loop_father->header != body[i])
        continue;

      if (LOOP_DATA (body[i]->loop_father)->has_call)
        {
          has_call = true;
          bitmap_set_bit (may_exit, i);
        }
      aexit = LOOP_DATA (body[i]->loop_father)->outermost_exit;
      if (aexit != loop)
        {
          bitmap_set_bit (may_exit, i);
          bitmap_set_bit (has_exit, i);

          if (flow_loop_nested_p (aexit, outermost_exit))
            outermost_exit = aexit;
        }
    }

  loop->aux = xcalloc (1, sizeof (struct loop_data));
  LOOP_DATA (loop)->outermost_exit = outermost_exit;
  LOOP_DATA (loop)->has_call = has_call;
}

/* Check whether we may assign a value to X from a register.  */

static bool
may_assign_reg_p (rtx x)
{
  return (can_copy_p (GET_MODE (x))
          && (!REG_P (x)
              || !HARD_REGISTER_P (x)
              || REGNO_REG_CLASS (REGNO (x)) != NO_REGS));
}

/* Finds definitions that may correspond to invariants in LOOP with body
   BODY.  */

static void
find_defs (struct loop *loop, basic_block *body)
{
  unsigned i;
  bitmap blocks = BITMAP_ALLOC (NULL);

  for (i = 0; i < loop->num_nodes; i++)
    bitmap_set_bit (blocks, body[i]->index);

  df_analyze_subcfg (df, blocks, DF_UD_CHAIN | DF_HARD_REGS | DF_EQUIV_NOTES);
  BITMAP_FREE (blocks);
}

/* Creates a new invariant for definition DEF in INSN, depending on invariants
   in DEPENDS_ON.  ALWAYS_EXECUTED is true if the insn is always executed,
   unless the program ends due to a function call.  */

static void
create_new_invariant (struct def *def, rtx insn, bitmap depends_on,
                      bool always_executed)
{
  struct invariant *inv = xmalloc (sizeof (struct invariant));
  rtx set = single_set (insn);

  inv->def = def;
  inv->always_executed = always_executed;
  inv->depends_on = depends_on;

  /* If the set is simple, usually by moving it we move the whole store out of
     the loop.  Otherwise we save only the cost of the computation.  */
  if (def)
    inv->cost = rtx_cost (set, SET);
  else
    inv->cost = rtx_cost (SET_SRC (set), SET);

  inv->move = false;
  inv->processed = false;
  inv->stamp = 0;
  inv->insn = insn;

  inv->invno = VEC_length (invariant_p, invariants);
  if (def)
    def->invno = inv->invno;
  VEC_safe_push (invariant_p, heap, invariants, inv);

  if (dump_file)
    {
      fprintf (dump_file,
               "Set in insn %d is invariant (%d), cost %d, depends on ",
               INSN_UID (insn), inv->invno, inv->cost);
      dump_bitmap (dump_file, inv->depends_on);
    }
}

/* Record USE at DEF.  */

static void
record_use (struct def *def, rtx *use, rtx insn)
{
  struct use *u = xmalloc (sizeof (struct use));

  if (GET_CODE (*use) == SUBREG)
    use = &SUBREG_REG (*use);
  gcc_assert (REG_P (*use));

  u->pos = use;
  u->insn = insn;
  u->next = def->uses;
  def->uses = u;
  def->n_uses++;
}

/* Finds the invariants INSN depends on and stores them to the DEPENDS_ON
   bitmap.  */

static bool
check_dependencies (rtx insn, bitmap depends_on)
{
  struct df_link *uses, *defs;
  struct ref *use, *def;
  basic_block bb = BLOCK_FOR_INSN (insn), def_bb;
  struct def *def_data;

  for (uses = DF_INSN_USES (df, insn); uses; uses = uses->next)
    {
      use = uses->ref;

      defs = DF_REF_CHAIN (use);
      if (!defs)
        continue;

      if (defs->next)
        return false;

      def = defs->ref;
      def_data = DF_REF_DATA (def);
      if (!def_data)
        return false;

      def_bb = DF_REF_BB (def);
      if (!dominated_by_p (CDI_DOMINATORS, bb, def_bb))
        return false;

      bitmap_set_bit (depends_on, def_data->invno);
    }

  return true;
}

/* Finds invariant in INSN.  ALWAYS_REACHED is true if the insn is always
   executed.  ALWAYS_EXECUTED is true if the insn is always executed,
   unless the program ends due to a function call.  */

static void
find_invariant_insn (rtx insn, bool always_reached, bool always_executed)
{
  struct ref *ref;
  struct def *def;
  bitmap depends_on;
  rtx set, dest;
  bool simple = true;

  /* Until we get rid of LIBCALLS.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX)
      || find_reg_note (insn, REG_LIBCALL, NULL_RTX)
      || find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
    return;

  set = single_set (insn);
  if (!set)
    return;
  dest = SET_DEST (set);

  if (!REG_P (dest)
      || HARD_REGISTER_P (dest))
    simple = false;

  if (!may_assign_reg_p (SET_DEST (set))
      || !check_maybe_invariant (SET_SRC (set)))
    return;

  if (may_trap_p (PATTERN (insn)))
    {
      if (!always_reached)
        return;

      /* Unless the exceptions are handled, the behavior is undefined
         if the trap occurs.  */
      if (flag_non_call_exceptions)
        return;
    }

  depends_on = BITMAP_ALLOC (NULL);
  if (!check_dependencies (insn, depends_on))
    {
      BITMAP_FREE (depends_on);
      return;
    }

  if (simple)
    {
      ref = df_find_def (df, insn, dest);
      def = xcalloc (1, sizeof (struct def));
      DF_REF_DATA (ref) = def;
    }
  else
    def = NULL;

  create_new_invariant (def, insn, depends_on, always_executed);
}

/* Record registers used in INSN that have a unique invariant definition.  */

static void
record_uses (rtx insn)
{
  struct df_link *uses, *defs;
  struct ref *use, *def;
  basic_block bb = BLOCK_FOR_INSN (insn), def_bb;

  for (uses = DF_INSN_USES (df, insn); uses; uses = uses->next)
    {
      use = uses->ref;

      defs = DF_REF_CHAIN (use);
      if (!defs || defs->next)
        continue;
      def = defs->ref;
      if (!DF_REF_DATA (def))
        continue;

      def_bb = DF_REF_BB (def);
      if (!dominated_by_p (CDI_DOMINATORS, bb, def_bb))
        continue;

      record_use (DF_REF_DATA (def), DF_REF_LOC (use), DF_REF_INSN (use));
    }
}

/* Finds invariants in INSN.  ALWAYS_REACHED is true if the insn is always
   executed.  ALWAYS_EXECUTED is true if the insn is always executed,
   unless the program ends due to a function call.  */

static void
find_invariants_insn (rtx insn, bool always_reached, bool always_executed)
{
  find_invariant_insn (insn, always_reached, always_executed);
  record_uses (insn);
}

/* Finds invariants in basic block BB.  ALWAYS_REACHED is true if the
   basic block is always executed.  ALWAYS_EXECUTED is true if the basic
   block is always executed, unless the program ends due to a function
   call.  */

static void
find_invariants_bb (basic_block bb, bool always_reached, bool always_executed)
{
  rtx insn;

  FOR_BB_INSNS (bb, insn)
    {
      if (!INSN_P (insn))
        continue;

      find_invariants_insn (insn, always_reached, always_executed);

      if (always_reached
          && CALL_P (insn)
          && !CONST_OR_PURE_CALL_P (insn))
        always_reached = false;
    }
}

/* Finds invariants in LOOP with body BODY.  ALWAYS_REACHED is the bitmap of
   basic blocks in BODY that are always executed.  ALWAYS_EXECUTED is the
   bitmap of basic blocks in BODY that are always executed unless the program
   ends due to a function call.  */

static void
find_invariants_body (struct loop *loop, basic_block *body,
                      bitmap always_reached, bitmap always_executed)
{
  unsigned i;

  for (i = 0; i < loop->num_nodes; i++)
    find_invariants_bb (body[i],
                        bitmap_bit_p (always_reached, i),
                        bitmap_bit_p (always_executed, i));
}

/* Finds invariants in LOOP.  */

static void
find_invariants (struct loop *loop)
{
  bitmap may_exit = BITMAP_ALLOC (NULL);
  bitmap always_reached = BITMAP_ALLOC (NULL);
  bitmap has_exit = BITMAP_ALLOC (NULL);
  bitmap always_executed = BITMAP_ALLOC (NULL);
  basic_block *body = get_loop_body_in_dom_order (loop);

  find_exits (loop, body, may_exit, has_exit);
  compute_always_reached (loop, body, may_exit, always_reached);
  compute_always_reached (loop, body, has_exit, always_executed);

  find_defs (loop, body);
  find_invariants_body (loop, body, always_reached, always_executed);

  BITMAP_FREE (always_reached);
  BITMAP_FREE (always_executed);
  BITMAP_FREE (may_exit);
  BITMAP_FREE (has_exit);
  free (body);
}

/* Frees a list of uses USE.  */

static void
free_use_list (struct use *use)
{
  struct use *next;

  for (; use; use = next)
    {
      next = use->next;
      free (use);
    }
}

/* Calculates cost and number of registers needed for moving invariant INV
   out of the loop and stores them to *COMP_COST and *REGS_NEEDED.  */

static void
get_inv_cost (struct invariant *inv, int *comp_cost, unsigned *regs_needed)
{
  int acomp_cost;
  unsigned aregs_needed;
  unsigned depno;
  struct invariant *dep;
  bitmap_iterator bi;

  *comp_cost = 0;
  *regs_needed = 0;
  if (inv->move
      || inv->stamp == actual_stamp)
    return;
  inv->stamp = actual_stamp;

  (*regs_needed)++;
  (*comp_cost) += inv->cost;

  EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, depno, bi)
    {
      dep = VEC_index (invariant_p, invariants, depno);

      get_inv_cost (dep, &acomp_cost, &aregs_needed);

      if (aregs_needed
          /* We need to check always_executed, since if the original value of
             the invariant may be preserved, we may need to keep it in a
             separate register.  TODO check whether the register has a
             use outside of the loop.  */
          && dep->always_executed
          && !dep->def->uses->next)
        {
          /* If this is a single use, after moving the dependency we will not
             need a new register.  */
          aregs_needed--;
        }

      (*regs_needed) += aregs_needed;
      (*comp_cost) += acomp_cost;
    }
}

/* Calculates gain for eliminating invariant INV.  REGS_USED is the number
   of registers used in the loop, N_INV_USES is the number of uses of
   invariants, NEW_REGS is the number of new variables already added due to
   the invariant motion.  The number of registers needed for it is stored in
   *REGS_NEEDED.  */

static int
gain_for_invariant (struct invariant *inv, unsigned *regs_needed,
                    unsigned new_regs, unsigned regs_used, unsigned n_inv_uses)
{
  int comp_cost, size_cost;

  get_inv_cost (inv, &comp_cost, regs_needed);
  actual_stamp++;

  size_cost = (global_cost_for_size (new_regs + *regs_needed,
                                     regs_used, n_inv_uses)
               - global_cost_for_size (new_regs, regs_used, n_inv_uses));

  /* The gain is the cost of recomputing the invariant inside the loop,
     decreased by the register-pressure cost of the extra registers needed
     to keep its value.  */
  return comp_cost - size_cost;
}

/* Finds invariant with best gain for moving.  Returns the gain, stores
   the invariant in *BEST and the number of registers needed for it to
   *REGS_NEEDED.  REGS_USED is the number of registers used in
   the loop, N_INV_USES is the number of uses of invariants.  NEW_REGS
   is the number of new variables already added due to invariant motion.  */

static int
best_gain_for_invariant (struct invariant **best, unsigned *regs_needed,
                         unsigned new_regs, unsigned regs_used,
                         unsigned n_inv_uses)
{
  struct invariant *inv;
  int gain = 0, again;
  unsigned aregs_needed, invno;

  for (invno = 0; VEC_iterate (invariant_p, invariants, invno, inv); invno++)
    {
      if (inv->move)
        continue;

      again = gain_for_invariant (inv, &aregs_needed,
                                  new_regs, regs_used, n_inv_uses);
      if (again > gain)
        {
          gain = again;
          *best = inv;
          *regs_needed = aregs_needed;
        }
    }

  return gain;
}

/* Marks invariant INVNO and all its dependencies for moving.  */

static void
set_move_mark (unsigned invno)
{
  struct invariant *inv = VEC_index (invariant_p, invariants, invno);
  bitmap_iterator bi;

  if (inv->move)
    return;
  inv->move = true;

  if (dump_file)
    fprintf (dump_file, "Decided to move invariant %d\n", invno);

  EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, invno, bi)
    {
      set_move_mark (invno);
    }
}

/* Determines which invariants to move.  */

static void
find_invariants_to_move (void)
{
  unsigned i, regs_used, n_inv_uses, regs_needed = 0, new_regs;
  struct invariant *inv = NULL;

  if (!VEC_length (invariant_p, invariants))
    return;

  /* Now something slightly more involved.  First estimate the number of used
     registers.  */
  n_inv_uses = 0;

  /* We do not really do a good job in this estimation; put some initial bound
     here to stand for induction variables etc. that we do not detect.  */
  regs_used = 2;

  for (i = 0; i < df->n_regs; i++)
    {
      if (!DF_REGNO_FIRST_DEF (df, i) && DF_REGNO_LAST_USE (df, i))
        {
          /* This is a value that is used but not changed inside the loop.  */
          regs_used++;
        }
    }

  for (i = 0; VEC_iterate (invariant_p, invariants, i, inv); i++)
    {
      if (inv->def)
        n_inv_uses += inv->def->n_uses;
    }

  new_regs = 0;
  while (best_gain_for_invariant (&inv, &regs_needed,
                                  new_regs, regs_used, n_inv_uses) > 0)
    {
      set_move_mark (inv->invno);
      new_regs += regs_needed;
    }
}

/* Move invariant INVNO out of the LOOP.  */

static void
move_invariant_reg (struct loop *loop, unsigned invno)
{
  struct invariant *inv = VEC_index (invariant_p, invariants, invno);
  unsigned i;
  basic_block preheader = loop_preheader_edge (loop)->src;
  rtx reg, set;
  struct use *use;
  bitmap_iterator bi;

  if (inv->processed)
    return;
  inv->processed = true;

  if (inv->depends_on)
    {
      EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, i, bi)
        {
          move_invariant_reg (loop, i);
        }
    }

  /* Move the set out of the loop.  If the set is always executed (we could
     omit this condition if we know that the register is unused outside of the
     loop, but it does not seem worth finding out) and it has no uses that
     would not be dominated by it, we may just move it (TODO).  Otherwise we
     need to create a temporary register.  */
  set = single_set (inv->insn);
  reg = gen_reg_rtx (GET_MODE (SET_DEST (set)));
  df_pattern_emit_after (df, gen_move_insn (SET_DEST (set), reg),
                         BLOCK_FOR_INSN (inv->insn), inv->insn);

  /* If the SET_DEST of the invariant insn is a reg, we can just move
     the insn out of the loop.  Otherwise, we have to use gen_move_insn
     to let emit_move_insn produce a valid instruction stream.  */
  if (REG_P (SET_DEST (set)))
    {
      SET_DEST (set) = reg;
      reorder_insns (inv->insn, inv->insn, BB_END (preheader));
      df_insn_modify (df, preheader, inv->insn);
    }
  else
    {
      df_pattern_emit_after (df, gen_move_insn (reg, SET_SRC (set)),
                             preheader, BB_END (preheader));
      df_insn_delete (df, BLOCK_FOR_INSN (inv->insn), inv->insn);
    }

  /* Replace the uses we know to be dominated.  It saves work for copy
     propagation, and also it is necessary so that dependent invariants
     are computed correctly.  */
  if (inv->def)
    {
      for (use = inv->def->uses; use; use = use->next)
        {
          *use->pos = reg;
          df_insn_modify (df, BLOCK_FOR_INSN (use->insn), use->insn);
        }
    }
}

/* Move the selected invariants out of the LOOP.  */

static void
move_invariants (struct loop *loop)
{
  struct invariant *inv;
  unsigned i;

  for (i = 0; VEC_iterate (invariant_p, invariants, i, inv); i++)
    {
      if (inv->move)
        move_invariant_reg (loop, i);
    }
}

/* Initializes invariant motion data.  */

static void
init_inv_motion_data (void)
{
  actual_stamp = 1;

  invariants = VEC_alloc (invariant_p, heap, 100);
}

/* Frees the data allocated by invariant motion.  */

static void
free_inv_motion_data (void)
{
  unsigned i;
  struct def *def;
  struct invariant *inv;

  for (i = 0; i < df->n_defs; i++)
    {
      if (!df->defs[i])
        continue;

      def = DF_REF_DATA (df->defs[i]);
      if (!def)
        continue;

      free_use_list (def->uses);
      free (def);
      DF_REF_DATA (df->defs[i]) = NULL;
    }

  for (i = 0; VEC_iterate (invariant_p, invariants, i, inv); i++)
    {
      BITMAP_FREE (inv->depends_on);
      free (inv);
    }
  VEC_free (invariant_p, heap, invariants);
}

/* Move the invariants out of the LOOP.  */

static void
move_single_loop_invariants (struct loop *loop)
{
  init_inv_motion_data ();

  find_invariants (loop);
  find_invariants_to_move ();
  move_invariants (loop);

  free_inv_motion_data ();
}

/* Releases the auxiliary data for LOOP.  */

static void
free_loop_data (struct loop *loop)
{
  struct loop_data *data = LOOP_DATA (loop);

  free (data);
  loop->aux = NULL;
}

/* Move the invariants out of the LOOPS.  */

void
move_loop_invariants (struct loops *loops)
{
  struct loop *loop;
  unsigned i;

  df = df_init ();

  /* Process the loops, innermost first.  */
  loop = loops->tree_root;
  while (loop->inner)
    loop = loop->inner;

  while (loop != loops->tree_root)
    {
      move_single_loop_invariants (loop);

      if (loop->next)
        {
          loop = loop->next;
          while (loop->inner)
            loop = loop->inner;
        }
      else
        loop = loop->outer;
    }

  for (i = 1; i < loops->num; i++)
    if (loops->parray[i])
      free_loop_data (loops->parray[i]);

  df_finish (df);
  df = NULL;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}