gcc/loop-invariant.c
/* Rtl-level loop invariant motion.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This implements the loop invariant motion pass.  It is very simple
   (no calls, libcalls, etc.).  This should be sufficient to clean up things
   like address arithmetic -- other, more complicated invariants should be
   eliminated on the tree level, either in tree-ssa-loop-im.c or in
   tree-ssa-pre.c.

   We proceed loop by loop -- it is simpler than trying to handle things
   globally and should not lose much.  First we inspect all sets inside the
   loop and create a dependency graph on insns (saying "to move this insn,
   you must also move the following insns").

   We then need to determine what to move.  We estimate the number of
   registers used and move as many invariants as possible while we still
   have enough free registers.  We prefer the expensive invariants.

   Then we move the selected invariants out of the loop, creating new
   temporaries for them if necessary.  */
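
/* As an illustrative example (a C-level view of what the pass achieves
   on RTL), in a loop like

     for (i = 0; i < n; i++)
       a[i] = x + y;

   the computation of "x + y" depends on nothing defined inside the loop,
   so the insn computing it can be hoisted into the loop preheader and its
   result kept in a register that is reused on every iteration, provided
   enough free registers are available.  */
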
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "expr.h"
#include "output.h"
#include "function.h"
#include "flags.h"
#include "df.h"

/* The data stored for the loop.  */

struct loop_data
{
  struct loop *outermost_exit;	/* The outermost exit of the loop.  */
  bool has_call;		/* True if the loop contains a call.  */
};

#define LOOP_DATA(LOOP) ((struct loop_data *) (LOOP)->aux)

/* The description of a use.  */

struct use
{
  rtx *pos;			/* Position of the use.  */
  rtx insn;			/* The insn in which the use occurs.  */

  struct use *next;		/* Next use in the list.  */
};

/* The description of a def.  */

struct def
{
  struct use *uses;		/* The list of uses that are uniquely reached
				   by it.  */
  unsigned n_uses;		/* Number of such uses.  */
  unsigned invno;		/* The corresponding invariant.  */
};

/* The data stored for each invariant.  */

struct invariant
{
  /* The number of the invariant.  */
  unsigned invno;

  /* Whether we already processed the invariant.  */
  bool processed;

  /* The definition of the invariant.  */
  struct def *def;

  /* The insn in which it is defined.  */
  rtx insn;

  /* Whether it is always executed.  */
  bool always_executed;

  /* Whether to move the invariant.  */
  bool move;

  /* Cost of the invariant.  */
  unsigned cost;

  /* The invariants it depends on.  */
  bitmap depends_on;

  /* Used for detecting already visited invariants while determining
     costs of movements.  */
  unsigned stamp;
};

/* The actual stamp for marking already visited invariants while determining
   costs of movements.  */

static unsigned actual_stamp;

typedef struct invariant *invariant_p;

DEF_VEC_P(invariant_p);
DEF_VEC_ALLOC_P(invariant_p, heap);

/* The invariants.  */

static VEC(invariant_p,heap) *invariants;

/* Test for possibility of invariantness of X.  */

static bool
check_maybe_invariant (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
      return true;

    case PC:
    case CC0:
    case UNSPEC_VOLATILE:
    case CALL:
      return false;

    case REG:
      return true;

    case MEM:
      /* Load/store motion is done elsewhere.  ??? Perhaps also add it here?
	 It should not be hard, and might be faster than "elsewhere".  */

      /* Just handle the most trivial case where we load from an unchanging
	 location (most importantly, pic tables).  */
      if (MEM_READONLY_P (x))
	break;

      return false;

    case ASM_OPERANDS:
      /* Don't mess with insns declared volatile.  */
      if (MEM_VOLATILE_P (x))
	return false;
      break;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (!check_maybe_invariant (XEXP (x, i)))
	    return false;
	}
      else if (fmt[i] == 'E')
	{
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (!check_maybe_invariant (XVECEXP (x, i, j)))
	      return false;
	}
    }

  return true;
}

/* Determines the basic blocks inside LOOP that are always executed and
   stores their bitmap to ALWAYS_REACHED.  MAY_EXIT is a bitmap of
   basic blocks that may either exit the loop, or contain the call that
   does not have to return.  BODY is the body of the loop obtained by
   get_loop_body_in_dom_order.  */

static void
compute_always_reached (struct loop *loop, basic_block *body,
			bitmap may_exit, bitmap always_reached)
{
  unsigned i;

  for (i = 0; i < loop->num_nodes; i++)
    {
      if (dominated_by_p (CDI_DOMINATORS, loop->latch, body[i]))
	bitmap_set_bit (always_reached, i);
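
      /* BODY is in dominance order, so once some block may exit the loop
	 (or contains a call that need not return), none of the following
	 blocks is guaranteed to be reached.  */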
      if (bitmap_bit_p (may_exit, i))
	return;
    }
}

/* Finds exits out of the LOOP with body BODY.  Marks in HAS_EXIT and
   MAY_EXIT the blocks from which we may exit the loop by a cfg edge.  In
   MAY_EXIT additionally mark blocks that may exit due to a call.  */

static void
find_exits (struct loop *loop, basic_block *body,
	    bitmap may_exit, bitmap has_exit)
{
  unsigned i;
  edge_iterator ei;
  edge e;
  struct loop *outermost_exit = loop, *aexit;
  bool has_call = false;
  rtx insn;

  for (i = 0; i < loop->num_nodes; i++)
    {
      if (body[i]->loop_father == loop)
	{
	  FOR_BB_INSNS (body[i], insn)
	    {
	      if (CALL_P (insn)
		  && !CONST_OR_PURE_CALL_P (insn))
		{
		  has_call = true;
		  bitmap_set_bit (may_exit, i);
		  break;
		}
	    }

	  FOR_EACH_EDGE (e, ei, body[i]->succs)
	    {
	      if (flow_bb_inside_loop_p (loop, e->dest))
		continue;

	      bitmap_set_bit (may_exit, i);
	      bitmap_set_bit (has_exit, i);
	      outermost_exit = find_common_loop (outermost_exit,
						 e->dest->loop_father);
	    }
	  continue;
	}

      /* Use the data stored for the subloop to decide whether we may exit
	 through it.  It is sufficient to do this for the header of the
	 subloop, as other basic blocks inside it must be dominated by it.  */
      if (body[i]->loop_father->header != body[i])
	continue;

      if (LOOP_DATA (body[i]->loop_father)->has_call)
	{
	  has_call = true;
	  bitmap_set_bit (may_exit, i);
	}

      aexit = LOOP_DATA (body[i]->loop_father)->outermost_exit;
      if (aexit != loop)
	{
	  bitmap_set_bit (may_exit, i);
	  bitmap_set_bit (has_exit, i);

	  if (flow_loop_nested_p (aexit, outermost_exit))
	    outermost_exit = aexit;
	}
    }

  loop->aux = xcalloc (1, sizeof (struct loop_data));
  LOOP_DATA (loop)->outermost_exit = outermost_exit;
  LOOP_DATA (loop)->has_call = has_call;
}

/* Check whether we may assign a value to X from a register.  */

static bool
may_assign_reg_p (rtx x)
{
  return can_copy_p (GET_MODE (x));
}

/* Finds definitions that may correspond to invariants in LOOP with body BODY.
   DF is the dataflow object.  */

static void
find_defs (struct loop *loop, basic_block *body, struct df *df)
{
  unsigned i;
  bitmap blocks = BITMAP_ALLOC (NULL);

  for (i = 0; i < loop->num_nodes; i++)
    bitmap_set_bit (blocks, body[i]->index);

  df_analyze_subcfg (df, blocks, DF_UD_CHAIN | DF_HARD_REGS | DF_EQUIV_NOTES);
  BITMAP_FREE (blocks);
}

/* Creates a new invariant for definition DEF in INSN, depending on invariants
   in DEPENDS_ON.  ALWAYS_EXECUTED is true if the insn is always executed,
   unless the program ends due to a function call.  */

static void
create_new_invariant (struct def *def, rtx insn, bitmap depends_on,
		      bool always_executed)
{
  struct invariant *inv = xmalloc (sizeof (struct invariant));
  rtx set = single_set (insn);

  inv->def = def;
  inv->always_executed = always_executed;
  inv->depends_on = depends_on;

  /* If the set is simple, usually by moving it we move the whole store out of
     the loop.  Otherwise we save only the cost of the computation.  */
  if (def)
    inv->cost = rtx_cost (set, SET);
  else
    inv->cost = rtx_cost (SET_SRC (set), SET);

  inv->move = false;
  inv->processed = false;
  inv->stamp = 0;
  inv->insn = insn;

  inv->invno = VEC_length (invariant_p, invariants);
  if (def)
    def->invno = inv->invno;
  VEC_safe_push (invariant_p, heap, invariants, inv);

  if (dump_file)
    {
      fprintf (dump_file,
	       "Set in insn %d is invariant (%d), cost %d, depends on ",
	       INSN_UID (insn), inv->invno, inv->cost);
      dump_bitmap (dump_file, inv->depends_on);
    }
}

/* Record USE at DEF.  */

static void
record_use (struct def *def, rtx *use, rtx insn)
{
  struct use *u = xmalloc (sizeof (struct use));

  if (GET_CODE (*use) == SUBREG)
    use = &SUBREG_REG (*use);
  gcc_assert (REG_P (*use));

  u->pos = use;
  u->insn = insn;
  u->next = def->uses;
  def->uses = u;
  def->n_uses++;
}

/* Finds the invariants INSN depends on and stores them to the DEPENDS_ON
   bitmap.  Returns false if INSN depends on something that cannot be made
   invariant.  DF is the dataflow object.  */

static bool
check_dependencies (rtx insn, struct df *df, bitmap depends_on)
{
  struct df_link *uses, *defs;
  struct ref *use, *def;
  basic_block bb = BLOCK_FOR_INSN (insn), def_bb;
  struct def *def_data;

  for (uses = DF_INSN_USES (df, insn); uses; uses = uses->next)
    {
      use = uses->ref;

      defs = DF_REF_CHAIN (use);
      if (!defs)
	continue;
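
      /* A use with more than one reaching definition may take a different
	 value in each iteration, so we cannot tie it to a single invariant
	 definition.  */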
      if (defs->next)
	return false;

      def = defs->ref;
      def_data = DF_REF_DATA (def);
      if (!def_data)
	return false;

      def_bb = DF_REF_BB (def);
      if (!dominated_by_p (CDI_DOMINATORS, bb, def_bb))
	return false;

      bitmap_set_bit (depends_on, def_data->invno);
    }

  return true;
}

/* Finds invariant in INSN.  ALWAYS_REACHED is true if the insn is always
   executed.  ALWAYS_EXECUTED is true if the insn is always executed,
   unless the program ends due to a function call.  DF is the dataflow
   object.  */

static void
find_invariant_insn (rtx insn, bool always_reached, bool always_executed,
		     struct df *df)
{
  struct ref *ref;
  struct def *def;
  bitmap depends_on;
  rtx set, dest;
  bool simple = true;

  /* Until we get rid of LIBCALLS.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX)
      || find_reg_note (insn, REG_LIBCALL, NULL_RTX)
      || find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
    return;

  set = single_set (insn);
  if (!set)
    return;
  dest = SET_DEST (set);
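
  /* If the destination is not a pseudo register, we cannot replace it with
     a new temporary; such an invariant is recorded without a DEF below (see
     create_new_invariant).  */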
  if (!REG_P (dest)
      || HARD_REGISTER_P (dest))
    simple = false;

  if (!check_maybe_invariant (SET_SRC (set))
      || !may_assign_reg_p (SET_DEST (set)))
    return;

  if (may_trap_p (PATTERN (insn)))
    {
      if (!always_reached)
	return;

      /* Unless the exceptions are handled, the behavior is undefined
	 if the trap occurs.  */
      if (flag_non_call_exceptions)
	return;
    }

  depends_on = BITMAP_ALLOC (NULL);
  if (!check_dependencies (insn, df, depends_on))
    {
      BITMAP_FREE (depends_on);
      return;
    }

  if (simple)
    {
      ref = df_find_def (df, insn, dest);
      def = xcalloc (1, sizeof (struct def));
      DF_REF_DATA (ref) = def;
    }
  else
    def = NULL;

  create_new_invariant (def, insn, depends_on, always_executed);
}

/* Record registers used in INSN that have a unique invariant definition.
   DF is the dataflow object.  */

static void
record_uses (rtx insn, struct df *df)
{
  struct df_link *uses, *defs;
  struct ref *use, *def;
  basic_block bb = BLOCK_FOR_INSN (insn), def_bb;

  for (uses = DF_INSN_USES (df, insn); uses; uses = uses->next)
    {
      use = uses->ref;

      defs = DF_REF_CHAIN (use);
      if (!defs || defs->next)
	continue;
      def = defs->ref;
      if (!DF_REF_DATA (def))
	continue;

      def_bb = DF_REF_BB (def);
      if (!dominated_by_p (CDI_DOMINATORS, bb, def_bb))
	continue;

      record_use (DF_REF_DATA (def), DF_REF_LOC (use), DF_REF_INSN (use));
    }
}

/* Finds invariants in INSN.  ALWAYS_REACHED is true if the insn is always
   executed.  ALWAYS_EXECUTED is true if the insn is always executed,
   unless the program ends due to a function call.  DF is the dataflow
   object.  */

static void
find_invariants_insn (rtx insn, bool always_reached, bool always_executed,
		      struct df *df)
{
  find_invariant_insn (insn, always_reached, always_executed, df);
  record_uses (insn, df);
}

/* Finds invariants in basic block BB.  ALWAYS_REACHED is true if the
   basic block is always executed.  ALWAYS_EXECUTED is true if the basic
   block is always executed, unless the program ends due to a function
   call.  DF is the dataflow object.  */

static void
find_invariants_bb (basic_block bb, bool always_reached, bool always_executed,
		    struct df *df)
{
  rtx insn;

  FOR_BB_INSNS (bb, insn)
    {
      if (!INSN_P (insn))
	continue;

      find_invariants_insn (insn, always_reached, always_executed, df);
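
      /* A call that is neither const nor pure need not return, so the insns
	 following it in this block are no longer guaranteed to be reached.  */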
      if (always_reached
	  && CALL_P (insn)
	  && !CONST_OR_PURE_CALL_P (insn))
	always_reached = false;
    }
}

/* Finds invariants in LOOP with body BODY.  ALWAYS_REACHED is the bitmap of
   basic blocks in BODY that are always executed.  ALWAYS_EXECUTED is the
   bitmap of basic blocks in BODY that are always executed unless the program
   ends due to a function call.  DF is the dataflow object.  */

static void
find_invariants_body (struct loop *loop, basic_block *body,
		      bitmap always_reached, bitmap always_executed,
		      struct df *df)
{
  unsigned i;

  for (i = 0; i < loop->num_nodes; i++)
    find_invariants_bb (body[i],
			bitmap_bit_p (always_reached, i),
			bitmap_bit_p (always_executed, i),
			df);
}

/* Finds invariants in LOOP.  DF is the dataflow object.  */

static void
find_invariants (struct loop *loop, struct df *df)
{
  bitmap may_exit = BITMAP_ALLOC (NULL);
  bitmap always_reached = BITMAP_ALLOC (NULL);
  bitmap has_exit = BITMAP_ALLOC (NULL);
  bitmap always_executed = BITMAP_ALLOC (NULL);
  basic_block *body = get_loop_body_in_dom_order (loop);

  find_exits (loop, body, may_exit, has_exit);
  compute_always_reached (loop, body, may_exit, always_reached);
  compute_always_reached (loop, body, has_exit, always_executed);

  find_defs (loop, body, df);
  find_invariants_body (loop, body, always_reached, always_executed, df);

  BITMAP_FREE (always_reached);
  BITMAP_FREE (always_executed);
  BITMAP_FREE (may_exit);
  BITMAP_FREE (has_exit);
  free (body);
}

/* Frees a list of uses USE.  */

static void
free_use_list (struct use *use)
{
  struct use *next;

  for (; use; use = next)
    {
      next = use->next;
      free (use);
    }
}

/* Calculates cost and number of registers needed for moving invariant INV
   out of the loop and stores them to *COMP_COST and *REGS_NEEDED.  */

static void
get_inv_cost (struct invariant *inv, int *comp_cost, unsigned *regs_needed)
{
  int acomp_cost;
  unsigned aregs_needed;
  unsigned depno;
  struct invariant *dep;
  bitmap_iterator bi;

  *comp_cost = 0;
  *regs_needed = 0;
  if (inv->move
      || inv->stamp == actual_stamp)
    return;
  inv->stamp = actual_stamp;

  (*regs_needed)++;
  (*comp_cost) += inv->cost;
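
  /* The invariants INV depends on have to be moved as well, so add their
     costs; the stamps prevent counting an invariant more than once when it
     is reached through several dependency paths.  */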
  EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, depno, bi)
    {
      dep = VEC_index (invariant_p, invariants, depno);

      get_inv_cost (dep, &acomp_cost, &aregs_needed);

      if (aregs_needed
	  /* We need to check always_executed, since if the original value of
	     the invariant may be preserved, we may need to keep it in a
	     separate register.  TODO check whether the register has a
	     use outside of the loop.  */
	  && dep->always_executed
	  && !dep->def->uses->next)
	{
	  /* If this is a single use, after moving the dependency we will not
	     need a new register.  */
	  aregs_needed--;
	}

      (*regs_needed) += aregs_needed;
      (*comp_cost) += acomp_cost;
    }
}

/* Calculates gain for eliminating invariant INV.  REGS_USED is the number
   of registers used in the loop, N_INV_USES is the number of uses of
   invariants, NEW_REGS is the number of new variables already added due to
   the invariant motion.  The number of registers needed for it is stored in
   *REGS_NEEDED.  */

static int
gain_for_invariant (struct invariant *inv, unsigned *regs_needed,
		    unsigned new_regs, unsigned regs_used, unsigned n_inv_uses)
{
  int comp_cost, size_cost;

  get_inv_cost (inv, &comp_cost, regs_needed);
  actual_stamp++;
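
  /* The gain is the cost of the computation saved by hoisting, minus the
     estimated cost of the increased register pressure caused by reserving
     the *REGS_NEEDED additional registers.  */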
  size_cost = (global_cost_for_size (new_regs + *regs_needed,
				     regs_used, n_inv_uses)
	       - global_cost_for_size (new_regs, regs_used, n_inv_uses));

  return comp_cost - size_cost;
}

/* Finds invariant with best gain for moving.  Returns the gain, stores
   the invariant in *BEST and number of registers needed for it to
   *REGS_NEEDED.  REGS_USED is the number of registers used in
   the loop, N_INV_USES is the number of uses of invariants.  NEW_REGS
   is the number of new variables already added due to invariant motion.  */

static int
best_gain_for_invariant (struct invariant **best, unsigned *regs_needed,
			 unsigned new_regs, unsigned regs_used,
			 unsigned n_inv_uses)
{
  struct invariant *inv;
  int gain = 0, again;
  unsigned aregs_needed, invno;

  for (invno = 0; VEC_iterate (invariant_p, invariants, invno, inv); invno++)
    {
      if (inv->move)
	continue;

      again = gain_for_invariant (inv, &aregs_needed,
				  new_regs, regs_used, n_inv_uses);
      if (again > gain)
	{
	  gain = again;
	  *best = inv;
	  *regs_needed = aregs_needed;
	}
    }

  return gain;
}

/* Marks invariant INVNO and all its dependencies for moving.  */

static void
set_move_mark (unsigned invno)
{
  struct invariant *inv = VEC_index (invariant_p, invariants, invno);
  bitmap_iterator bi;

  if (inv->move)
    return;
  inv->move = true;

  if (dump_file)
    fprintf (dump_file, "Decided to move invariant %d\n", invno);

  EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, invno, bi)
    {
      set_move_mark (invno);
    }
}

/* Determines which invariants to move.  DF is the dataflow object.  */

static void
find_invariants_to_move (struct df *df)
{
  unsigned i, regs_used, n_inv_uses, regs_needed = 0, new_regs;
  struct invariant *inv = NULL;

  if (!VEC_length (invariant_p, invariants))
    return;

  /* Now something slightly more involved.  First estimate the number of used
     registers.  */
  n_inv_uses = 0;

  /* We do not really do a good job in this estimation; put some initial bound
     here to stand for induction variables etc. that we do not detect.  */
  regs_used = 2;

  for (i = 0; i < df->n_regs; i++)
    {
      if (!DF_REGNO_FIRST_DEF (df, i) && DF_REGNO_LAST_USE (df, i))
	{
	  /* This is a value that is used but not changed inside loop.  */
	  regs_used++;
	}
    }

  for (i = 0; VEC_iterate (invariant_p, invariants, i, inv); i++)
    {
      if (inv->def)
	n_inv_uses += inv->def->n_uses;
    }

  new_regs = 0;
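  /* Greedily pick the invariant with the largest gain and mark it (together
     with its dependencies) for moving, until no remaining candidate has a
     positive gain.  */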
  while (best_gain_for_invariant (&inv, &regs_needed,
				  new_regs, regs_used, n_inv_uses) > 0)
    {
      set_move_mark (inv->invno);
      new_regs += regs_needed;
    }
}

/* Move invariant INVNO out of the LOOP.  DF is the dataflow object.  */

static void
move_invariant_reg (struct loop *loop, unsigned invno, struct df *df)
{
  struct invariant *inv = VEC_index (invariant_p, invariants, invno);
  unsigned i;
  basic_block preheader = loop_preheader_edge (loop)->src;
  rtx reg, set;
  struct use *use;
  bitmap_iterator bi;

  if (inv->processed)
    return;
  inv->processed = true;

  if (inv->depends_on)
    {
      EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, i, bi)
	{
	  move_invariant_reg (loop, i, df);
	}
    }

  /* Move the set out of the loop.  If the set is always executed (we could
     omit this condition if we know that the register is unused outside of the
     loop, but it does not seem worth finding out) and it has no uses that
     would not be dominated by it, we may just move it (TODO).  Otherwise we
     need to create a temporary register.  */
  set = single_set (inv->insn);
  reg = gen_reg_rtx (GET_MODE (SET_DEST (set)));
  df_pattern_emit_after (df, gen_move_insn (SET_DEST (set), reg),
			 BLOCK_FOR_INSN (inv->insn), inv->insn);
  SET_DEST (set) = reg;
  reorder_insns (inv->insn, inv->insn, BB_END (preheader));
  df_insn_modify (df, preheader, inv->insn);
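
  /* At this point the original insn computes the new temporary REG and has
     been moved to the end of the preheader, while the copy from REG into the
     original destination emitted above stays at the insn's old position
     inside the loop.  */
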
  /* Replace the uses we know to be dominated.  It saves work for copy
     propagation, and also it is necessary so that dependent invariants
     are computed right.  */
  if (inv->def)
    {
      for (use = inv->def->uses; use; use = use->next)
	{
	  *use->pos = reg;
	  df_insn_modify (df, BLOCK_FOR_INSN (use->insn), use->insn);
	}
    }
}

/* Move selected invariants out of the LOOP.  DF is the dataflow object.  */

static void
move_invariants (struct loop *loop, struct df *df)
{
  struct invariant *inv;
  unsigned i;

  for (i = 0; VEC_iterate (invariant_p, invariants, i, inv); i++)
    {
      if (inv->move)
	move_invariant_reg (loop, i, df);
    }
}

/* Initializes invariant motion data.  */

static void
init_inv_motion_data (void)
{
  actual_stamp = 1;

  invariants = VEC_alloc (invariant_p, heap, 100);
}

/* Frees the data allocated by invariant motion.  DF is the dataflow
   object.  */

static void
free_inv_motion_data (struct df *df)
{
  unsigned i;
  struct def *def;
  struct invariant *inv;

  for (i = 0; i < df->n_defs; i++)
    {
      if (!df->defs[i])
	continue;

      def = DF_REF_DATA (df->defs[i]);
      if (!def)
	continue;

      free_use_list (def->uses);
      free (def);
      DF_REF_DATA (df->defs[i]) = NULL;
    }

  for (i = 0; VEC_iterate (invariant_p, invariants, i, inv); i++)
    {
      BITMAP_FREE (inv->depends_on);
      free (inv);
    }
  VEC_free (invariant_p, heap, invariants);
}

/* Move the invariants out of the LOOP.  DF is the dataflow object.  */

static void
move_single_loop_invariants (struct loop *loop, struct df *df)
{
  init_inv_motion_data ();

  find_invariants (loop, df);
  find_invariants_to_move (df);
  move_invariants (loop, df);

  free_inv_motion_data (df);
}

/* Releases the auxiliary data for LOOP.  */

static void
free_loop_data (struct loop *loop)
{
  struct loop_data *data = LOOP_DATA (loop);

  free (data);
  loop->aux = NULL;
}

/* Move the invariants out of the LOOPS.  */

void
move_loop_invariants (struct loops *loops)
{
  struct loop *loop;
  unsigned i;
  struct df *df = df_init ();

  /* Process the loops, innermost first.  */
  loop = loops->tree_root;
  while (loop->inner)
    loop = loop->inner;

  while (loop != loops->tree_root)
    {
      move_single_loop_invariants (loop, df);
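
      /* Advance to the next loop: visit a sibling's innermost subloop if
	 there is a sibling, otherwise step back to the parent.  */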
      if (loop->next)
	{
	  loop = loop->next;
	  while (loop->inner)
	    loop = loop->inner;
	}
      else
	loop = loop->outer;
    }

  for (i = 1; i < loops->num; i++)
    if (loops->parray[i])
      free_loop_data (loops->parray[i]);

  df_finish (df);
}