/* Rtl-level loop invariant motion.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This implements the loop invariant motion pass.  It is very simple
   (it does not handle calls, libcalls, etc.).  This should be sufficient
   to clean up things like address arithmetic -- other more complicated
   invariants should be eliminated on the tree level either in
   tree-ssa-loop-im.c or in tree-ssa-pre.c.

   We proceed loop by loop -- it is simpler than trying to handle things
   globally and should not lose much.  First we inspect all sets inside the
   loop and create a dependency graph on insns (saying "to move this insn,
   you must also move the following insns").

   We then need to determine what to move.  We estimate the number of
   registers used and move as many invariants as possible while we still
   have enough free registers.  We prefer the expensive invariants.

   Then we move the selected invariants out of the loop, creating new
   temporaries for them if necessary.  */
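
/* As an illustration: in a hypothetical loop such as

       for (i = 0; i < n; i++)
         a[i] = x + y;

   where x + y does not change inside the loop, the set computing x + y can
   be hoisted into the loop preheader and replaced by a single register
   inside the loop, provided enough free registers remain.  */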

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "expr.h"
#include "output.h"
#include "function.h"
#include "flags.h"
#include "df.h"

/* The data stored for the loop.  */

struct loop_data
{
  struct loop *outermost_exit;  /* The outermost exit of the loop.  */
  bool has_call;                /* True if the loop contains a call.  */
};

#define LOOP_DATA(LOOP) ((struct loop_data *) (LOOP)->aux)

/* The description of a use.  */

struct use
{
  rtx *pos;                     /* Position of the use.  */
  rtx insn;                     /* The insn in which the use occurs.  */
  struct use *next;             /* Next use in the list.  */
};

/* The description of a def.  */

struct def
{
  struct use *uses;             /* The list of uses that are uniquely reached
                                   by it.  */
  unsigned n_uses;              /* Number of such uses.  */
  unsigned invno;               /* The corresponding invariant.  */
};

/* The data stored for each invariant.  */

struct invariant
{
  /* The number of the invariant.  */
  unsigned invno;

  /* Whether we already processed the invariant.  */
  bool processed;

  /* The definition of the invariant.  */
  struct def *def;

  /* The insn in which it is defined.  */
  rtx insn;

  /* Whether it is always executed.  */
  bool always_executed;

  /* Whether to move the invariant.  */
  bool move;

  /* Cost of the invariant.  */
  unsigned cost;

  /* The invariants it depends on.  */
  bitmap depends_on;

  /* Used for detecting already visited invariants when determining the
     costs of movements.  */
  unsigned stamp;
};

/* The actual stamp used for marking already visited invariants when
   determining the costs of movements.  */

static unsigned actual_stamp;

typedef struct invariant *invariant_p;

DEF_VEC_P(invariant_p);
DEF_VEC_ALLOC_P(invariant_p, heap);

/* The invariants.  */

static VEC(invariant_p,heap) *invariants;

/* Test for possibility of invariantness of X.  */

static bool
check_maybe_invariant (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
      return true;

    case PC:
    case CC0:
    case UNSPEC_VOLATILE:
    case CALL:
      return false;

    case REG:
      return true;

    case MEM:
      /* Load/store motion is done elsewhere.  ??? Perhaps also add it here?
         It should not be hard, and might be faster than "elsewhere".  */

      /* Just handle the most trivial case where we load from an unchanging
         location (most importantly, pic tables).  */
      if (MEM_READONLY_P (x))
        break;

      return false;

    case ASM_OPERANDS:
      /* Don't mess with insns declared volatile.  */
      if (MEM_VOLATILE_P (x))
        return false;
      break;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (!check_maybe_invariant (XEXP (x, i)))
            return false;
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            if (!check_maybe_invariant (XVECEXP (x, i, j)))
              return false;
        }
    }

  return true;
}
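
/* As an illustration, an expression such as (plus:SI (reg:SI 60)
   (const_int 4)) passes this check, as does a load from a MEM_READONLY_P
   location such as a PIC table, while anything containing a call, a
   volatile asm, an UNSPEC_VOLATILE or an ordinary writable memory
   reference is rejected.  */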

/* Determines the basic blocks inside LOOP that are always executed and
   stores their bitmap to ALWAYS_REACHED.  MAY_EXIT is a bitmap of the
   basic blocks that may either exit the loop, or contain a call that
   might not return.  BODY is the body of the loop obtained by
   get_loop_body_in_dom_order.  */

static void
compute_always_reached (struct loop *loop, basic_block *body,
                        bitmap may_exit, bitmap always_reached)
{
  unsigned i;

  for (i = 0; i < loop->num_nodes; i++)
    {
      if (dominated_by_p (CDI_DOMINATORS, loop->latch, body[i]))
        bitmap_set_bit (always_reached, i);

      if (bitmap_bit_p (may_exit, i))
        return;
    }
}

/* Finds exits out of the LOOP with body BODY.  Marks the blocks from which
   we may exit the loop by a cfg edge in both HAS_EXIT and MAY_EXIT.  In
   MAY_EXIT, additionally mark the blocks that may exit due to a call.  */

static void
find_exits (struct loop *loop, basic_block *body,
            bitmap may_exit, bitmap has_exit)
{
  unsigned i;
  edge_iterator ei;
  edge e;
  struct loop *outermost_exit = loop, *aexit;
  bool has_call = false;
  rtx insn;

  for (i = 0; i < loop->num_nodes; i++)
    {
      if (body[i]->loop_father == loop)
        {
          FOR_BB_INSNS (body[i], insn)
            {
              if (CALL_P (insn)
                  && !CONST_OR_PURE_CALL_P (insn))
                {
                  has_call = true;
                  bitmap_set_bit (may_exit, i);
                  break;
                }
            }

          FOR_EACH_EDGE (e, ei, body[i]->succs)
            {
              if (flow_bb_inside_loop_p (loop, e->dest))
                continue;

              bitmap_set_bit (may_exit, i);
              bitmap_set_bit (has_exit, i);
              outermost_exit = find_common_loop (outermost_exit,
                                                 e->dest->loop_father);
            }
          continue;
        }

      /* Use the data stored for the subloop to decide whether we may exit
         through it.  It is sufficient to do this for the header of the
         subloop, as other basic blocks inside it must be dominated by it.  */
      if (body[i]->loop_father->header != body[i])
        continue;

      if (LOOP_DATA (body[i]->loop_father)->has_call)
        {
          has_call = true;
          bitmap_set_bit (may_exit, i);
        }
      aexit = LOOP_DATA (body[i]->loop_father)->outermost_exit;
      if (aexit != loop)
        {
          bitmap_set_bit (may_exit, i);
          bitmap_set_bit (has_exit, i);

          if (flow_loop_nested_p (aexit, outermost_exit))
            outermost_exit = aexit;
        }
    }

  loop->aux = xcalloc (1, sizeof (struct loop_data));
  LOOP_DATA (loop)->outermost_exit = outermost_exit;
  LOOP_DATA (loop)->has_call = has_call;
}

/* Check whether we may assign a value to X from a register.  */

static bool
may_assign_reg_p (rtx x)
{
  return (can_copy_p (GET_MODE (x))
          && (!REG_P (x)
              || !HARD_REGISTER_P (x)
              || REGNO_REG_CLASS (REGNO (x)) != NO_REGS));
}

/* Finds definitions that may correspond to invariants in LOOP with body
   BODY.  DF is the dataflow object.  */

static void
find_defs (struct loop *loop, basic_block *body, struct df *df)
{
  unsigned i;
  bitmap blocks = BITMAP_ALLOC (NULL);

  for (i = 0; i < loop->num_nodes; i++)
    bitmap_set_bit (blocks, body[i]->index);

  df_analyze_subcfg (df, blocks, DF_UD_CHAIN | DF_HARD_REGS | DF_EQUIV_NOTES);
  BITMAP_FREE (blocks);
}

/* Creates a new invariant for definition DEF in INSN, depending on the
   invariants in DEPENDS_ON.  ALWAYS_EXECUTED is true if the insn is always
   executed, unless the program ends due to a function call.  */

static void
create_new_invariant (struct def *def, rtx insn, bitmap depends_on,
                      bool always_executed)
{
  struct invariant *inv = xmalloc (sizeof (struct invariant));
  rtx set = single_set (insn);

  inv->def = def;
  inv->always_executed = always_executed;
  inv->depends_on = depends_on;

  /* If the set is simple, usually by moving it we move the whole store out
     of the loop.  Otherwise we save only the cost of the computation.  */
  if (def)
    inv->cost = rtx_cost (set, SET);
  else
    inv->cost = rtx_cost (SET_SRC (set), SET);

  inv->move = false;
  inv->processed = false;
  inv->stamp = 0;
  inv->insn = insn;

  inv->invno = VEC_length (invariant_p, invariants);
  if (def)
    def->invno = inv->invno;
  VEC_safe_push (invariant_p, heap, invariants, inv);

  if (dump_file)
    {
      fprintf (dump_file,
               "Set in insn %d is invariant (%d), cost %d, depends on ",
               INSN_UID (insn), inv->invno, inv->cost);
      dump_bitmap (dump_file, inv->depends_on);
    }
}

/* Record USE at DEF.  */

static void
record_use (struct def *def, rtx *use, rtx insn)
{
  struct use *u = xmalloc (sizeof (struct use));

  if (GET_CODE (*use) == SUBREG)
    use = &SUBREG_REG (*use);
  gcc_assert (REG_P (*use));

  u->pos = use;
  u->insn = insn;
  u->next = def->uses;
  def->uses = u;
  def->n_uses++;
}

/* Finds the invariants INSN depends on and stores them to the DEPENDS_ON
   bitmap.  Returns false if the dependencies prevent INSN from being an
   invariant.  DF is the dataflow object.  */

static bool
check_dependencies (rtx insn, struct df *df, bitmap depends_on)
{
  struct df_link *uses, *defs;
  struct ref *use, *def;
  basic_block bb = BLOCK_FOR_INSN (insn), def_bb;
  struct def *def_data;

  for (uses = DF_INSN_USES (df, insn); uses; uses = uses->next)
    {
      use = uses->ref;

      defs = DF_REF_CHAIN (use);
      if (!defs)
        continue;

      if (defs->next)
        return false;

      def = defs->ref;
      def_data = DF_REF_DATA (def);
      if (!def_data)
        return false;

      def_bb = DF_REF_BB (def);
      if (!dominated_by_p (CDI_DOMINATORS, bb, def_bb))
        return false;

      bitmap_set_bit (depends_on, def_data->invno);
    }

  return true;
}

/* Finds invariant in INSN.  ALWAYS_REACHED is true if the insn is always
   executed.  ALWAYS_EXECUTED is true if the insn is always executed,
   unless the program ends due to a function call.  DF is the dataflow
   object.  */

static void
find_invariant_insn (rtx insn, bool always_reached, bool always_executed,
                     struct df *df)
{
  struct ref *ref;
  struct def *def;
  bitmap depends_on;
  rtx set, dest;
  bool simple = true;

  /* Until we get rid of LIBCALLS.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX)
      || find_reg_note (insn, REG_LIBCALL, NULL_RTX)
      || find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
    return;

  set = single_set (insn);
  if (!set)
    return;
  dest = SET_DEST (set);

  if (!REG_P (dest)
      || HARD_REGISTER_P (dest))
    simple = false;

  if (!may_assign_reg_p (SET_DEST (set))
      || !check_maybe_invariant (SET_SRC (set)))
    return;

  if (may_trap_p (PATTERN (insn)))
    {
      if (!always_reached)
        return;

      /* Unless the exceptions are handled, the behavior is undefined
         if the trap occurs.  */
      if (flag_non_call_exceptions)
        return;
    }

  depends_on = BITMAP_ALLOC (NULL);
  if (!check_dependencies (insn, df, depends_on))
    {
      BITMAP_FREE (depends_on);
      return;
    }

  if (simple)
    {
      ref = df_find_def (df, insn, dest);
      def = xcalloc (1, sizeof (struct def));
      DF_REF_DATA (ref) = def;
    }
  else
    def = NULL;

  create_new_invariant (def, insn, depends_on, always_executed);
}
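
/* To summarize the conditions checked above: an insn becomes a candidate
   invariant only if it is a single_set whose source passes
   check_maybe_invariant, it is not part of a libcall sequence, it cannot
   trap (or it is always reached and non-call exceptions are disabled),
   and every register it uses has a unique dominating definition that is
   itself an invariant.  */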

/* Record registers used in INSN that have a unique invariant definition.
   DF is the dataflow object.  */

static void
record_uses (rtx insn, struct df *df)
{
  struct df_link *uses, *defs;
  struct ref *use, *def;
  basic_block bb = BLOCK_FOR_INSN (insn), def_bb;

  for (uses = DF_INSN_USES (df, insn); uses; uses = uses->next)
    {
      use = uses->ref;

      defs = DF_REF_CHAIN (use);
      if (!defs || defs->next)
        continue;
      def = defs->ref;
      if (!DF_REF_DATA (def))
        continue;

      def_bb = DF_REF_BB (def);
      if (!dominated_by_p (CDI_DOMINATORS, bb, def_bb))
        continue;

      record_use (DF_REF_DATA (def), DF_REF_LOC (use), DF_REF_INSN (use));
    }
}

/* Finds invariants in INSN.  ALWAYS_REACHED is true if the insn is always
   executed.  ALWAYS_EXECUTED is true if the insn is always executed,
   unless the program ends due to a function call.  DF is the dataflow
   object.  */

static void
find_invariants_insn (rtx insn, bool always_reached, bool always_executed,
                      struct df *df)
{
  find_invariant_insn (insn, always_reached, always_executed, df);
  record_uses (insn, df);
}

/* Finds invariants in basic block BB.  ALWAYS_REACHED is true if the
   basic block is always executed.  ALWAYS_EXECUTED is true if the basic
   block is always executed, unless the program ends due to a function
   call.  DF is the dataflow object.  */

static void
find_invariants_bb (basic_block bb, bool always_reached, bool always_executed,
                    struct df *df)
{
  rtx insn;

  FOR_BB_INSNS (bb, insn)
    {
      if (!INSN_P (insn))
        continue;

      find_invariants_insn (insn, always_reached, always_executed, df);

      if (always_reached
          && CALL_P (insn)
          && !CONST_OR_PURE_CALL_P (insn))
        always_reached = false;
    }
}

/* Finds invariants in LOOP with body BODY.  ALWAYS_REACHED is the bitmap of
   basic blocks in BODY that are always executed.  ALWAYS_EXECUTED is the
   bitmap of basic blocks in BODY that are always executed unless the program
   ends due to a function call.  DF is the dataflow object.  */

static void
find_invariants_body (struct loop *loop, basic_block *body,
                      bitmap always_reached, bitmap always_executed,
                      struct df *df)
{
  unsigned i;

  for (i = 0; i < loop->num_nodes; i++)
    find_invariants_bb (body[i],
                        bitmap_bit_p (always_reached, i),
                        bitmap_bit_p (always_executed, i),
                        df);
}

/* Finds invariants in LOOP.  DF is the dataflow object.  */

static void
find_invariants (struct loop *loop, struct df *df)
{
  bitmap may_exit = BITMAP_ALLOC (NULL);
  bitmap always_reached = BITMAP_ALLOC (NULL);
  bitmap has_exit = BITMAP_ALLOC (NULL);
  bitmap always_executed = BITMAP_ALLOC (NULL);
  basic_block *body = get_loop_body_in_dom_order (loop);

  find_exits (loop, body, may_exit, has_exit);
  compute_always_reached (loop, body, may_exit, always_reached);
  compute_always_reached (loop, body, has_exit, always_executed);

  find_defs (loop, body, df);
  find_invariants_body (loop, body, always_reached, always_executed, df);

  BITMAP_FREE (always_reached);
  BITMAP_FREE (always_executed);
  BITMAP_FREE (may_exit);
  BITMAP_FREE (has_exit);
  free (body);
}

/* Frees a list of uses USE.  */

static void
free_use_list (struct use *use)
{
  struct use *next;

  for (; use; use = next)
    {
      next = use->next;
      free (use);
    }
}

/* Calculates the cost and the number of registers needed for moving
   invariant INV out of the loop and stores them to *COMP_COST and
   *REGS_NEEDED.  */

static void
get_inv_cost (struct invariant *inv, int *comp_cost, unsigned *regs_needed)
{
  int acomp_cost;
  unsigned aregs_needed;
  unsigned depno;
  struct invariant *dep;
  bitmap_iterator bi;

  *comp_cost = 0;
  *regs_needed = 0;
  if (inv->move
      || inv->stamp == actual_stamp)
    return;
  inv->stamp = actual_stamp;

  (*regs_needed)++;
  (*comp_cost) += inv->cost;

  EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, depno, bi)
    {
      dep = VEC_index (invariant_p, invariants, depno);

      get_inv_cost (dep, &acomp_cost, &aregs_needed);

      if (aregs_needed
          /* We need to check always_executed, since if the original value of
             the invariant may be preserved, we may need to keep it in a
             separate register.  TODO: check whether the register has any
             use outside of the loop.  */
          && dep->always_executed
          && !dep->def->uses->next)
        {
          /* If this is a single use, after moving the dependency we will not
             need a new register.  */
          aregs_needed--;
        }

      (*regs_needed) += aregs_needed;
      (*comp_cost) += acomp_cost;
    }
}

/* Calculates gain for eliminating invariant INV.  REGS_USED is the number
   of registers used in the loop, N_INV_USES is the number of uses of
   invariants, NEW_REGS is the number of new variables already added due to
   the invariant motion.  The number of registers needed for it is stored in
   *REGS_NEEDED.  */

static int
gain_for_invariant (struct invariant *inv, unsigned *regs_needed,
                    unsigned new_regs, unsigned regs_used, unsigned n_inv_uses)
{
  int comp_cost, size_cost;

  get_inv_cost (inv, &comp_cost, regs_needed);
  actual_stamp++;

  size_cost = (global_cost_for_size (new_regs + *regs_needed,
                                     regs_used, n_inv_uses)
               - global_cost_for_size (new_regs, regs_used, n_inv_uses));

  return comp_cost - size_cost;
}
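
/* A hypothetical worked example of the gain computation (the numbers are
   invented, not taken from GCC's cost tables): if moving INV and its
   not-yet-moved dependencies needs 2 new registers at a combined
   computation cost of 8, and going from NEW_REGS to NEW_REGS + 2 registers
   raises the estimated register-pressure cost from 1 to 3, the gain is
   8 - (3 - 1) = 6, so moving the invariant is considered profitable.  */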

/* Finds the invariant with the best gain for moving.  Returns the gain,
   stores the invariant in *BEST and the number of registers needed for it
   to *REGS_NEEDED.  REGS_USED is the number of registers used in the loop,
   N_INV_USES is the number of uses of invariants.  NEW_REGS is the number
   of new variables already added due to invariant motion.  */

static int
best_gain_for_invariant (struct invariant **best, unsigned *regs_needed,
                         unsigned new_regs, unsigned regs_used,
                         unsigned n_inv_uses)
{
  struct invariant *inv;
  int gain = 0, again;
  unsigned aregs_needed, invno;

  for (invno = 0; VEC_iterate (invariant_p, invariants, invno, inv); invno++)
    {
      if (inv->move)
        continue;

      again = gain_for_invariant (inv, &aregs_needed,
                                  new_regs, regs_used, n_inv_uses);
      if (again > gain)
        {
          gain = again;
          *best = inv;
          *regs_needed = aregs_needed;
        }
    }

  return gain;
}

/* Marks invariant INVNO and all its dependencies for moving.  */

static void
set_move_mark (unsigned invno)
{
  struct invariant *inv = VEC_index (invariant_p, invariants, invno);
  bitmap_iterator bi;

  if (inv->move)
    return;
  inv->move = true;

  if (dump_file)
    fprintf (dump_file, "Decided to move invariant %d\n", invno);

  EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, invno, bi)
    {
      set_move_mark (invno);
    }
}

/* Determines which invariants to move.  DF is the dataflow object.  */

static void
find_invariants_to_move (struct df *df)
{
  unsigned i, regs_used, n_inv_uses, regs_needed = 0, new_regs;
  struct invariant *inv = NULL;

  if (!VEC_length (invariant_p, invariants))
    return;

  /* Now something slightly more involved.  First estimate the number of used
     registers.  */
  n_inv_uses = 0;

  /* We do not really do a good job in this estimation; put some initial
     bound here to stand for induction variables etc. that we do not
     detect.  */
  regs_used = 2;

  for (i = 0; i < df->n_regs; i++)
    {
      if (!DF_REGNO_FIRST_DEF (df, i) && DF_REGNO_LAST_USE (df, i))
        {
          /* This is a value that is used but not changed inside the loop.  */
          regs_used++;
        }
    }

  for (i = 0; VEC_iterate (invariant_p, invariants, i, inv); i++)
    {
      if (inv->def)
        n_inv_uses += inv->def->n_uses;
    }

  new_regs = 0;
  while (best_gain_for_invariant (&inv, &regs_needed,
                                  new_regs, regs_used, n_inv_uses) > 0)
    {
      set_move_mark (inv->invno);
      new_regs += regs_needed;
    }
}
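
/* The selection loop above is greedy: each iteration marks the invariant
   with the currently best gain (together with its dependencies) for moving,
   accounts for its registers in NEW_REGS, and repeats until no remaining
   invariant has a positive gain.  */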

/* Move invariant INVNO out of the LOOP.  DF is the dataflow object.  */

static void
move_invariant_reg (struct loop *loop, unsigned invno, struct df *df)
{
  struct invariant *inv = VEC_index (invariant_p, invariants, invno);
  unsigned i;
  basic_block preheader = loop_preheader_edge (loop)->src;
  rtx reg, set;
  struct use *use;
  bitmap_iterator bi;

  if (inv->processed)
    return;
  inv->processed = true;

  if (inv->depends_on)
    {
      EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, i, bi)
        {
          move_invariant_reg (loop, i, df);
        }
    }

  /* Move the set out of the loop.  If the set is always executed (we could
     omit this condition if we know that the register is unused outside of
     the loop, but it does not seem worth finding out) and it has no uses
     that would not be dominated by it, we may just move it (TODO).
     Otherwise we need to create a temporary register.  */
  set = single_set (inv->insn);
  reg = gen_reg_rtx (GET_MODE (SET_DEST (set)));
  df_pattern_emit_after (df, gen_move_insn (SET_DEST (set), reg),
                         BLOCK_FOR_INSN (inv->insn), inv->insn);

  /* If the SET_DEST of the invariant insn is a reg, we can just move
     the insn out of the loop.  Otherwise, we have to use gen_move_insn
     to let emit_move_insn produce a valid instruction stream.  */
  if (REG_P (SET_DEST (set)))
    {
      SET_DEST (set) = reg;
      reorder_insns (inv->insn, inv->insn, BB_END (preheader));
      df_insn_modify (df, preheader, inv->insn);
    }
  else
    {
      df_pattern_emit_after (df, gen_move_insn (reg, SET_SRC (set)),
                             preheader, BB_END (preheader));
      df_insn_delete (df, BLOCK_FOR_INSN (inv->insn), inv->insn);
    }

  /* Replace the uses we know to be dominated.  It saves work for copy
     propagation, and also it is necessary so that dependent invariants
     are computed right.  */
  if (inv->def)
    {
      for (use = inv->def->uses; use; use = use->next)
        {
          *use->pos = reg;
          df_insn_modify (df, BLOCK_FOR_INSN (use->insn), use->insn);
        }
    }
}
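
/* An illustrative sketch with hypothetical RTL: for an invariant insn

       (set (reg:SI 60) (plus:SI (reg:SI 58) (const_int 4)))

   a fresh pseudo, say (reg:SI 70), is created; a copy
   (set (reg:SI 60) (reg:SI 70)) is emitted after the original insn; the
   original insn is retargeted to set (reg:SI 70) and moved to the end of
   the loop preheader; and the dominated uses of (reg:SI 60) are rewritten
   to refer to (reg:SI 70) directly.  */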

/* Moves the selected invariants out of the LOOP.  DF is the dataflow
   object.  */

static void
move_invariants (struct loop *loop, struct df *df)
{
  struct invariant *inv;
  unsigned i;

  for (i = 0; VEC_iterate (invariant_p, invariants, i, inv); i++)
    {
      if (inv->move)
        move_invariant_reg (loop, i, df);
    }
}

/* Initializes invariant motion data.  */

static void
init_inv_motion_data (void)
{
  actual_stamp = 1;

  invariants = VEC_alloc (invariant_p, heap, 100);
}

/* Frees the data allocated by invariant motion.  DF is the dataflow
   object.  */

static void
free_inv_motion_data (struct df *df)
{
  unsigned i;
  struct def *def;
  struct invariant *inv;

  for (i = 0; i < df->n_defs; i++)
    {
      if (!df->defs[i])
        continue;

      def = DF_REF_DATA (df->defs[i]);
      if (!def)
        continue;

      free_use_list (def->uses);
      free (def);
      DF_REF_DATA (df->defs[i]) = NULL;
    }

  for (i = 0; VEC_iterate (invariant_p, invariants, i, inv); i++)
    {
      BITMAP_FREE (inv->depends_on);
      free (inv);
    }
  VEC_free (invariant_p, heap, invariants);
}

/* Move the invariants out of the LOOP.  DF is the dataflow object.  */

static void
move_single_loop_invariants (struct loop *loop, struct df *df)
{
  init_inv_motion_data ();

  find_invariants (loop, df);
  find_invariants_to_move (df);
  move_invariants (loop, df);

  free_inv_motion_data (df);
}

/* Releases the auxiliary data for LOOP.  */

static void
free_loop_data (struct loop *loop)
{
  struct loop_data *data = LOOP_DATA (loop);

  free (data);
  loop->aux = NULL;
}

/* Move the invariants out of the LOOPS.  */

void
move_loop_invariants (struct loops *loops)
{
  struct loop *loop;
  unsigned i;
  struct df *df = df_init ();

  /* Process the loops, innermost first.  */
  loop = loops->tree_root;
  while (loop->inner)
    loop = loop->inner;

  while (loop != loops->tree_root)
    {
      move_single_loop_invariants (loop, df);

      if (loop->next)
        {
          loop = loop->next;
          while (loop->inner)
            loop = loop->inner;
        }
      else
        loop = loop->outer;
    }

  for (i = 1; i < loops->num; i++)
    if (loops->parray[i])
      free_loop_data (loops->parray[i]);

  df_finish (df);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}