/* RTL-level loop invariant motion.
   Copyright (C) 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* This implements the loop invariant motion pass.  It is very simple
   (no calls, no libcalls, etc.).  This should be sufficient to clean up
   things like address arithmetic -- other more complicated invariants
   should be eliminated at the tree level, either in tree-ssa-loop-im.c or
   in tree-ssa-pre.c.

   We proceed loop by loop -- it is simpler than trying to handle things
   globally and should not lose much.  First we inspect all sets inside the
   loop and create a dependency graph on insns (saying "to move this insn,
   you must also move the following insns").

   We then need to determine what to move.  We estimate the number of
   registers used and move as many invariants as possible while we still
   have enough free registers.  We prefer the expensive invariants.

   Then we move the selected invariants out of the loop, creating new
   temporaries for them if necessary.  */
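
/* Purely illustrative example (not part of the algorithm description
   above): for a source-level loop such as

     for (i = 0; i < n; i++)
       a[i] = x * y + i;

   the computation of x * y does not change between iterations, so at the
   RTL level this pass hoists the set computing it into the loop preheader
   and leaves only a use of the resulting pseudo register inside the
   loop body.  */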
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "function.h"
#include "flags.h"
#include "df.h"
#include "hashtab.h"
/* The data stored for the loop.  */

struct loop_data
{
  struct loop *outermost_exit;  /* The outermost exit of the loop.  */
  bool has_call;                /* True if the loop contains a call.  */
};

#define LOOP_DATA(LOOP) ((struct loop_data *) (LOOP)->aux)
/* The description of a use.  */

struct use
{
  rtx *pos;                     /* Position of the use.  */
  rtx insn;                     /* The insn in which the use occurs.  */

  struct use *next;             /* Next use in the list.  */
};

/* The description of a def.  */

struct def
{
  struct use *uses;             /* The list of uses that are uniquely reached
                                   by it.  */
  unsigned n_uses;              /* Number of such uses.  */
  unsigned invno;               /* The corresponding invariant.  */
};
/* The data stored for each invariant.  */

struct invariant
{
  /* The number of the invariant.  */
  unsigned invno;

  /* The number of the invariant with the same value.  */
  unsigned eqto;

  /* If we moved the invariant out of the loop, the register that contains its
     value.  */
  rtx reg;

  /* The definition of the invariant.  */
  struct def *def;

  /* The insn in which it is defined.  */
  rtx insn;

  /* Whether it is always executed.  */
  bool always_executed;

  /* Whether to move the invariant.  */
  bool move;

  /* Cost of the invariant.  */
  unsigned cost;

  /* The invariants it depends on.  */
  bitmap depends_on;

  /* Used for detecting already visited invariants while determining the
     cost of movement.  */
  unsigned stamp;
};
/* Entry for the hash table of invariant expressions.  */

struct invariant_expr_entry
{
  /* The invariant.  */
  struct invariant *inv;

  /* Its value.  */
  rtx expr;

  /* Its mode.  */
  enum machine_mode mode;

  /* Its hash.  */
  hashval_t hash;
};

/* The current stamp used for marking already visited invariants while
   determining the cost of movement.  */

static unsigned actual_stamp;

typedef struct invariant *invariant_p;

DEF_VEC_P(invariant_p);
DEF_VEC_ALLOC_P(invariant_p, heap);

/* The invariants.  */

static VEC(invariant_p,heap) *invariants;

/* The dataflow object.  */

static struct df *df = NULL;
/* Tests whether X could possibly be invariant.  */

static bool
check_maybe_invariant (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
      return true;

    case PC:
    case CC0:
    case UNSPEC_VOLATILE:
    case CALL:
      return false;

    case REG:
      return true;

    case MEM:
      /* Load/store motion is done elsewhere.  ??? Perhaps also add it here?
         It should not be hard, and might be faster than "elsewhere".  */

      /* Just handle the most trivial case where we load from an unchanging
         location (most importantly, pic tables).  */
      if (MEM_READONLY_P (x))
        break;

      return false;

    case ASM_OPERANDS:
      /* Don't mess with insns declared volatile.  */
      if (MEM_VOLATILE_P (x))
        return false;
      break;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (!check_maybe_invariant (XEXP (x, i)))
            return false;
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            if (!check_maybe_invariant (XVECEXP (x, i, j)))
              return false;
        }
    }

  return true;
}
/* Returns the invariant definition for USE, or NULL if USE is not
   invariant.  */

static struct invariant *
invariant_for_use (struct df_ref *use)
{
  struct df_link *defs;
  struct df_ref *def;
  basic_block bb = BLOCK_FOR_INSN (use->insn), def_bb;

  defs = DF_REF_CHAIN (use);
  if (!defs || defs->next)
    return NULL;
  def = defs->ref;
  if (!DF_REF_DATA (def))
    return NULL;

  def_bb = DF_REF_BB (def);
  if (!dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    return NULL;
  return DF_REF_DATA (def);
}
/* Computes hash value for invariant expression X in INSN.  */

static hashval_t
hash_invariant_expr_1 (rtx insn, rtx x)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;
  hashval_t val = code;
  int do_not_record_p;
  struct df_ref *use;
  struct invariant *inv;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
      return hash_rtx (x, GET_MODE (x), &do_not_record_p, NULL, false);

    case REG:
      use = df_find_use (df, insn, x);
      if (!use)
        return hash_rtx (x, GET_MODE (x), &do_not_record_p, NULL, false);
      inv = invariant_for_use (use);
      if (!inv)
        return hash_rtx (x, GET_MODE (x), &do_not_record_p, NULL, false);

      gcc_assert (inv->eqto != ~0u);
      return inv->eqto;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        val ^= hash_invariant_expr_1 (insn, XEXP (x, i));
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            val ^= hash_invariant_expr_1 (insn, XVECEXP (x, i, j));
        }
    }

  return val;
}
/* Returns true if the invariant expressions E1 and E2, used in insns INSN1
   and INSN2, always have the same value.  */

static bool
invariant_expr_equal_p (rtx insn1, rtx e1, rtx insn2, rtx e2)
{
  enum rtx_code code = GET_CODE (e1);
  int i, j;
  const char *fmt;
  struct df_ref *use1, *use2;
  struct invariant *inv1 = NULL, *inv2 = NULL;
  rtx sub1, sub2;

  /* If the mode of only one of the operands is VOIDmode, it is not equivalent
     to the other one.  If both are VOIDmode, we rely on the caller of this
     function to verify that their modes are the same.  */
  if (code != GET_CODE (e2) || GET_MODE (e1) != GET_MODE (e2))
    return false;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
      return rtx_equal_p (e1, e2);

    case REG:
      use1 = df_find_use (df, insn1, e1);
      use2 = df_find_use (df, insn2, e2);
      if (use1)
        inv1 = invariant_for_use (use1);
      if (use2)
        inv2 = invariant_for_use (use2);

      if (!inv1 && !inv2)
        return rtx_equal_p (e1, e2);

      if (!inv1 || !inv2)
        return false;

      gcc_assert (inv1->eqto != ~0u);
      gcc_assert (inv2->eqto != ~0u);
      return inv1->eqto == inv2->eqto;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          sub1 = XEXP (e1, i);
          sub2 = XEXP (e2, i);

          if (!invariant_expr_equal_p (insn1, sub1, insn2, sub2))
            return false;
        }
      else if (fmt[i] == 'E')
        {
          if (XVECLEN (e1, i) != XVECLEN (e2, i))
            return false;

          for (j = 0; j < XVECLEN (e1, i); j++)
            {
              sub1 = XVECEXP (e1, i, j);
              sub2 = XVECEXP (e2, i, j);

              if (!invariant_expr_equal_p (insn1, sub1, insn2, sub2))
                return false;
            }
        }
    }

  return true;
}
/* Returns hash value for invariant expression entry E.  */

static hashval_t
hash_invariant_expr (const void *e)
{
  const struct invariant_expr_entry *entry = e;

  return entry->hash;
}

/* Compares invariant expression entries E1 and E2.  */

static int
eq_invariant_expr (const void *e1, const void *e2)
{
  const struct invariant_expr_entry *entry1 = e1;
  const struct invariant_expr_entry *entry2 = e2;

  if (entry1->mode != entry2->mode)
    return 0;

  return invariant_expr_equal_p (entry1->inv->insn, entry1->expr,
                                 entry2->inv->insn, entry2->expr);
}
/* Checks whether an invariant with value EXPR in machine mode MODE is
   recorded in EQ.  If this is the case, return the invariant.  Otherwise
   insert INV into the table for this expression and return INV.  */

static struct invariant *
find_or_insert_inv (htab_t eq, rtx expr, enum machine_mode mode,
                    struct invariant *inv)
{
  hashval_t hash = hash_invariant_expr_1 (inv->insn, expr);
  struct invariant_expr_entry *entry;
  struct invariant_expr_entry pentry;
  PTR *slot;

  pentry.expr = expr;
  pentry.inv = inv;
  pentry.mode = mode;
  slot = htab_find_slot_with_hash (eq, &pentry, hash, INSERT);
  entry = *slot;

  if (entry)
    return entry->inv;

  entry = XNEW (struct invariant_expr_entry);
  entry->inv = inv;
  entry->expr = expr;
  entry->mode = mode;
  entry->hash = hash;
  *slot = entry;

  return inv;
}
/* Finds invariants identical to INV and records the equivalence.  EQ is the
   hash table of the invariants.  */

static void
find_identical_invariants (htab_t eq, struct invariant *inv)
{
  unsigned depno;
  bitmap_iterator bi;
  struct invariant *dep;
  rtx expr, set;
  enum machine_mode mode;

  if (inv->eqto != ~0u)
    return;

  EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, depno, bi)
    {
      dep = VEC_index (invariant_p, invariants, depno);
      find_identical_invariants (eq, dep);
    }

  set = single_set (inv->insn);
  expr = SET_SRC (set);
  mode = GET_MODE (expr);
  if (mode == VOIDmode)
    mode = GET_MODE (SET_DEST (set));
  inv->eqto = find_or_insert_inv (eq, expr, mode, inv)->invno;

  if (dump_file && inv->eqto != inv->invno)
    fprintf (dump_file,
             "Invariant %d is equivalent to invariant %d.\n",
             inv->invno, inv->eqto);
}

/* Find invariants with the same value and record the equivalences.  */

static void
merge_identical_invariants (void)
{
  unsigned i;
  struct invariant *inv;
  htab_t eq = htab_create (VEC_length (invariant_p, invariants),
                           hash_invariant_expr, eq_invariant_expr, free);

  for (i = 0; VEC_iterate (invariant_p, invariants, i, inv); i++)
    find_identical_invariants (eq, inv);

  htab_delete (eq);
}
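
/* Illustration of the merging above (hypothetical pseudo-RTL, register
   numbers made up): if the loop body contains two independent invariant
   sets

     (set (reg 100) (plus (reg 90) (const_int 4)))
     (set (reg 101) (plus (reg 90) (const_int 4)))

   both compute the same value, so the second invariant is recorded as
   equivalent to the first.  Only the representative is moved out of the
   loop; the duplicate is later replaced by a copy from the
   representative's register (see move_invariant_reg).  */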
/* Determines the basic blocks inside LOOP that are always executed and
   stores their bitmap to ALWAYS_REACHED.  MAY_EXIT is a bitmap of
   basic blocks that may either exit the loop, or contain a call that
   does not have to return.  BODY is the body of the loop obtained by
   get_loop_body_in_dom_order.  */

static void
compute_always_reached (struct loop *loop, basic_block *body,
                        bitmap may_exit, bitmap always_reached)
{
  unsigned i;

  for (i = 0; i < loop->num_nodes; i++)
    {
      if (dominated_by_p (CDI_DOMINATORS, loop->latch, body[i]))
        bitmap_set_bit (always_reached, i);

      if (bitmap_bit_p (may_exit, i))
        return;
    }
}
/* Finds exits out of the LOOP with body BODY.  Marks the blocks from which
   we may exit the loop by a cfg edge in both HAS_EXIT and MAY_EXIT.  In
   MAY_EXIT, additionally mark the blocks that may exit due to a call.  */

static void
find_exits (struct loop *loop, basic_block *body,
            bitmap may_exit, bitmap has_exit)
{
  unsigned i;
  edge_iterator ei;
  edge e;
  struct loop *outermost_exit = loop, *aexit;
  bool has_call = false;
  rtx insn;

  for (i = 0; i < loop->num_nodes; i++)
    {
      if (body[i]->loop_father == loop)
        {
          FOR_BB_INSNS (body[i], insn)
            {
              if (CALL_P (insn)
                  && !CONST_OR_PURE_CALL_P (insn))
                {
                  has_call = true;
                  bitmap_set_bit (may_exit, i);
                  break;
                }
            }

          FOR_EACH_EDGE (e, ei, body[i]->succs)
            {
              if (flow_bb_inside_loop_p (loop, e->dest))
                continue;

              bitmap_set_bit (may_exit, i);
              bitmap_set_bit (has_exit, i);
              outermost_exit = find_common_loop (outermost_exit,
                                                 e->dest->loop_father);
            }
          continue;
        }

      /* Use the data stored for the subloop to decide whether we may exit
         through it.  It is sufficient to do this for the header of the loop,
         as other basic blocks inside it must be dominated by it.  */
      if (body[i]->loop_father->header != body[i])
        continue;

      if (LOOP_DATA (body[i]->loop_father)->has_call)
        {
          has_call = true;
          bitmap_set_bit (may_exit, i);
        }

      aexit = LOOP_DATA (body[i]->loop_father)->outermost_exit;
      if (aexit != loop)
        {
          bitmap_set_bit (may_exit, i);
          bitmap_set_bit (has_exit, i);

          if (flow_loop_nested_p (aexit, outermost_exit))
            outermost_exit = aexit;
        }
    }

  loop->aux = xcalloc (1, sizeof (struct loop_data));
  LOOP_DATA (loop)->outermost_exit = outermost_exit;
  LOOP_DATA (loop)->has_call = has_call;
}
/* Check whether we may assign a value to X from a register.  */

static bool
may_assign_reg_p (rtx x)
{
  return (can_copy_p (GET_MODE (x))
          && (!REG_P (x)
              || !HARD_REGISTER_P (x)
              || REGNO_REG_CLASS (REGNO (x)) != NO_REGS));
}

/* Finds definitions that may correspond to invariants in LOOP with body
   BODY.  */

static void
find_defs (struct loop *loop, basic_block *body)
{
  unsigned i;
  bitmap blocks = BITMAP_ALLOC (NULL);

  for (i = 0; i < loop->num_nodes; i++)
    bitmap_set_bit (blocks, body[i]->index);

  df_set_blocks (df, blocks);
  df_analyze (df);
  BITMAP_FREE (blocks);
}
/* Creates a new invariant for definition DEF in INSN, depending on invariants
   in DEPENDS_ON.  ALWAYS_EXECUTED is true if the insn is always executed,
   unless the program ends due to a function call.  The newly created
   invariant is returned.  */

static struct invariant *
create_new_invariant (struct def *def, rtx insn, bitmap depends_on,
                      bool always_executed)
{
  struct invariant *inv = XNEW (struct invariant);
  rtx set = single_set (insn);

  inv->def = def;
  inv->always_executed = always_executed;
  inv->depends_on = depends_on;

  /* If the set is simple, usually by moving it we move the whole store out of
     the loop.  Otherwise we save only the cost of the computation.  */
  if (def)
    inv->cost = rtx_cost (set, SET);
  else
    inv->cost = rtx_cost (SET_SRC (set), SET);

  inv->move = false;
  inv->reg = NULL_RTX;
  inv->stamp = 0;
  inv->insn = insn;

  inv->invno = VEC_length (invariant_p, invariants);
  inv->eqto = ~0u;
  if (def)
    def->invno = inv->invno;
  VEC_safe_push (invariant_p, heap, invariants, inv);

  if (dump_file)
    {
      fprintf (dump_file,
               "Set in insn %d is invariant (%d), cost %d, depends on ",
               INSN_UID (insn), inv->invno, inv->cost);
      dump_bitmap (dump_file, inv->depends_on);
    }

  return inv;
}
/* Record USE at DEF.  */

static void
record_use (struct def *def, rtx *use, rtx insn)
{
  struct use *u = XNEW (struct use);

  if (GET_CODE (*use) == SUBREG)
    use = &SUBREG_REG (*use);
  gcc_assert (REG_P (*use));

  u->pos = use;
  u->insn = insn;
  u->next = def->uses;
  def->uses = u;
  def->n_uses++;
}
/* Finds the invariants INSN depends on and stores them in the DEPENDS_ON
   bitmap.  Returns false if INSN cannot be moved, i.e. if some register it
   uses is defined inside the loop by something other than a single,
   dominating, invariant definition.  */

static bool
check_dependencies (rtx insn, bitmap depends_on)
{
  struct df_link *defs;
  struct df_ref *use, *def;
  basic_block bb = BLOCK_FOR_INSN (insn), def_bb;
  struct def *def_data;
  struct invariant *inv;

  for (use = DF_INSN_GET (df, insn)->uses; use; use = use->next_ref)
    {
      defs = DF_REF_CHAIN (use);
      if (!defs)
        continue;

      if (defs->next)
        return false;

      def = defs->ref;
      inv = DF_REF_DATA (def);
      if (!inv)
        return false;

      def_data = inv->def;
      gcc_assert (def_data != NULL);

      def_bb = DF_REF_BB (def);
      /* Note that in case bb == def_bb, we know that the definition dominates
         insn, because def has DF_REF_DATA defined and we process the insns
         in the basic block bb sequentially.  */
      if (!dominated_by_p (CDI_DOMINATORS, bb, def_bb))
        return false;

      bitmap_set_bit (depends_on, def_data->invno);
    }

  return true;
}
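
/* Illustrative note (hypothetical situation): if reg 90 is set once before
   the loop and again conditionally inside the loop body, a use of reg 90
   inside the loop has two reaching definitions, so any set using it is
   rejected by check_dependencies above even if its computation looks
   syntactically invariant.  */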
/* Finds an invariant in INSN.  ALWAYS_REACHED is true if the insn is always
   executed.  ALWAYS_EXECUTED is true if the insn is always executed,
   unless the program ends due to a function call.  */

static void
find_invariant_insn (rtx insn, bool always_reached, bool always_executed)
{
  struct df_ref *ref;
  struct def *def;
  bitmap depends_on;
  rtx set, dest;
  bool simple = true;
  struct invariant *inv;

  /* Until we get rid of LIBCALLs.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX)
      || find_reg_note (insn, REG_LIBCALL, NULL_RTX)
      || find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
    return;

  set = single_set (insn);
  if (!set)
    return;
  dest = SET_DEST (set);

  if (!REG_P (dest)
      || HARD_REGISTER_P (dest))
    simple = false;

  if (!may_assign_reg_p (SET_DEST (set))
      || !check_maybe_invariant (SET_SRC (set)))
    return;

  if (may_trap_p (PATTERN (insn)))
    {
      if (!always_reached)
        return;

      /* Unless the exceptions are handled, the behavior is undefined
         if the trap occurs.  */
      if (flag_non_call_exceptions)
        return;
    }

  depends_on = BITMAP_ALLOC (NULL);
  if (!check_dependencies (insn, depends_on))
    {
      BITMAP_FREE (depends_on);
      return;
    }

  if (simple)
    def = XCNEW (struct def);
  else
    def = NULL;

  inv = create_new_invariant (def, insn, depends_on, always_executed);

  if (simple)
    {
      ref = df_find_def (df, insn, dest);
      DF_REF_DATA (ref) = inv;
    }
}
/* Record registers used in INSN that have a unique invariant definition.  */

static void
record_uses (rtx insn)
{
  struct df_ref *use;
  struct invariant *inv;

  for (use = DF_INSN_GET (df, insn)->uses; use; use = use->next_ref)
    {
      inv = invariant_for_use (use);
      if (inv)
        record_use (inv->def, DF_REF_LOC (use), DF_REF_INSN (use));
    }
}

/* Finds invariants in INSN.  ALWAYS_REACHED is true if the insn is always
   executed.  ALWAYS_EXECUTED is true if the insn is always executed,
   unless the program ends due to a function call.  */

static void
find_invariants_insn (rtx insn, bool always_reached, bool always_executed)
{
  find_invariant_insn (insn, always_reached, always_executed);
  record_uses (insn);
}
/* Finds invariants in basic block BB.  ALWAYS_REACHED is true if the
   basic block is always executed.  ALWAYS_EXECUTED is true if the basic
   block is always executed, unless the program ends due to a function
   call.  */

static void
find_invariants_bb (basic_block bb, bool always_reached, bool always_executed)
{
  rtx insn;

  FOR_BB_INSNS (bb, insn)
    {
      if (!INSN_P (insn))
        continue;

      find_invariants_insn (insn, always_reached, always_executed);

      if (always_reached
          && CALL_P (insn)
          && !CONST_OR_PURE_CALL_P (insn))
        always_reached = false;
    }
}

/* Finds invariants in LOOP with body BODY.  ALWAYS_REACHED is the bitmap of
   basic blocks in BODY that are always executed.  ALWAYS_EXECUTED is the
   bitmap of basic blocks in BODY that are always executed unless the program
   ends due to a function call.  */

static void
find_invariants_body (struct loop *loop, basic_block *body,
                      bitmap always_reached, bitmap always_executed)
{
  unsigned i;

  for (i = 0; i < loop->num_nodes; i++)
    find_invariants_bb (body[i],
                        bitmap_bit_p (always_reached, i),
                        bitmap_bit_p (always_executed, i));
}
/* Finds invariants in LOOP.  */

static void
find_invariants (struct loop *loop)
{
  bitmap may_exit = BITMAP_ALLOC (NULL);
  bitmap always_reached = BITMAP_ALLOC (NULL);
  bitmap has_exit = BITMAP_ALLOC (NULL);
  bitmap always_executed = BITMAP_ALLOC (NULL);
  basic_block *body = get_loop_body_in_dom_order (loop);

  find_exits (loop, body, may_exit, has_exit);
  compute_always_reached (loop, body, may_exit, always_reached);
  compute_always_reached (loop, body, has_exit, always_executed);

  find_defs (loop, body);
  find_invariants_body (loop, body, always_reached, always_executed);
  merge_identical_invariants ();

  BITMAP_FREE (always_reached);
  BITMAP_FREE (always_executed);
  BITMAP_FREE (may_exit);
  BITMAP_FREE (has_exit);
  free (body);
}

/* Frees a list of uses USE.  */

static void
free_use_list (struct use *use)
{
  struct use *next;

  for (; use; use = next)
    {
      next = use->next;
      free (use);
    }
}
/* Calculates the cost and the number of registers needed for moving
   invariant INV out of the loop and stores them to *COMP_COST and
   *REGS_NEEDED.  */

static void
get_inv_cost (struct invariant *inv, int *comp_cost, unsigned *regs_needed)
{
  int acomp_cost;
  unsigned aregs_needed;
  unsigned depno;
  struct invariant *dep;
  bitmap_iterator bi;

  /* Find the representative of the class of the equivalent invariants.  */
  inv = VEC_index (invariant_p, invariants, inv->eqto);

  *comp_cost = 0;
  *regs_needed = 0;
  if (inv->move
      || inv->stamp == actual_stamp)
    return;
  inv->stamp = actual_stamp;

  (*regs_needed)++;
  (*comp_cost) += inv->cost;

  EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, depno, bi)
    {
      dep = VEC_index (invariant_p, invariants, depno);

      get_inv_cost (dep, &acomp_cost, &aregs_needed);

      if (aregs_needed
          /* We need to check always_executed, since if the original value of
             the invariant may be preserved, we may need to keep it in a
             separate register.  TODO check whether the register has a
             use outside of the loop.  */
          && dep->always_executed
          && !dep->def->uses->next)
        {
          /* If this is a single use, after moving the dependency we will not
             need a new register.  */
          aregs_needed--;
        }

      (*regs_needed) += aregs_needed;
      (*comp_cost) += acomp_cost;
    }
}
/* Calculates the gain for eliminating invariant INV.  REGS_USED is the number
   of registers used in the loop, N_INV_USES is the number of uses of
   invariants, NEW_REGS is the number of new variables already added due to
   the invariant motion.  The number of registers needed for INV is stored in
   *REGS_NEEDED.  */

static int
gain_for_invariant (struct invariant *inv, unsigned *regs_needed,
                    unsigned new_regs, unsigned regs_used, unsigned n_inv_uses)
{
  int comp_cost, size_cost;

  get_inv_cost (inv, &comp_cost, regs_needed);
  actual_stamp++;

  size_cost = (global_cost_for_size (new_regs + *regs_needed,
                                     regs_used, n_inv_uses)
               - global_cost_for_size (new_regs, regs_used, n_inv_uses));

  return comp_cost - size_cost;
}
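
/* Worked example for gain_for_invariant above, with made-up numbers: if
   computing the invariant (and its not-yet-moved dependencies) costs
   comp_cost = 8 and needs *regs_needed = 1 extra register, and
   global_cost_for_size reports a register-pressure cost of 5 with that
   extra register but 3 without it, then size_cost = 5 - 3 = 2 and the
   gain is 8 - 2 = 6, so moving this invariant looks profitable.  */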
/* Finds the invariant with the best gain for moving.  Returns the gain,
   stores the invariant in *BEST and the number of registers needed for it
   to *REGS_NEEDED.  REGS_USED is the number of registers used in the loop,
   N_INV_USES is the number of uses of invariants.  NEW_REGS is the number
   of new variables already added due to invariant motion.  */

static int
best_gain_for_invariant (struct invariant **best, unsigned *regs_needed,
                         unsigned new_regs, unsigned regs_used,
                         unsigned n_inv_uses)
{
  struct invariant *inv;
  int gain = 0, again;
  unsigned aregs_needed, invno;

  for (invno = 0; VEC_iterate (invariant_p, invariants, invno, inv); invno++)
    {
      if (inv->move)
        continue;

      /* Only consider the "representatives" of equivalent invariants.  */
      if (inv->eqto != inv->invno)
        continue;

      again = gain_for_invariant (inv, &aregs_needed,
                                  new_regs, regs_used, n_inv_uses);
      if (again > gain)
        {
          gain = again;
          *best = inv;
          *regs_needed = aregs_needed;
        }
    }

  return gain;
}
/* Marks invariant INVNO and all its dependencies for moving.  */

static void
set_move_mark (unsigned invno)
{
  struct invariant *inv = VEC_index (invariant_p, invariants, invno);
  bitmap_iterator bi;

  /* Find the representative of the class of the equivalent invariants.  */
  inv = VEC_index (invariant_p, invariants, inv->eqto);

  if (inv->move)
    return;
  inv->move = true;

  if (dump_file)
    fprintf (dump_file, "Decided to move invariant %d\n", invno);

  EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, invno, bi)
    {
      set_move_mark (invno);
    }
}
/* Determines which invariants to move.  */

static void
find_invariants_to_move (void)
{
  unsigned i, regs_used, n_inv_uses, regs_needed = 0, new_regs;
  struct invariant *inv = NULL;
  unsigned int n_regs = DF_REG_SIZE (df);

  if (!VEC_length (invariant_p, invariants))
    return;

  /* Now something slightly more involved.  First estimate the number of used
     registers.  */
  n_inv_uses = 0;

  /* We do not really do a good job in this estimation; put some initial bound
     here to stand for induction variables etc. that we do not detect.  */
  regs_used = 2;

  for (i = 0; i < n_regs; i++)
    {
      if (!DF_REGNO_FIRST_DEF (df, i) && DF_REGNO_LAST_USE (df, i))
        {
          /* This is a value that is used but not changed inside loop.  */
          regs_used++;
        }
    }

  for (i = 0; VEC_iterate (invariant_p, invariants, i, inv); i++)
    {
      if (inv->def)
        n_inv_uses += inv->def->n_uses;
    }

  new_regs = 0;
  while (best_gain_for_invariant (&inv, &regs_needed,
                                  new_regs, regs_used, n_inv_uses) > 0)
    {
      set_move_mark (inv->invno);
      new_regs += regs_needed;
    }
}
/* Move invariant INVNO out of the LOOP.  */
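
/* Illustrative sketch of the transformation (hypothetical insns and
   register numbers): for an invariant

     (set (reg 100) (mult (reg 90) (reg 91)))

   whose destination may still be needed inside the loop, the pass creates
   a fresh pseudo and ends up with

     preheader:  (set (reg 117) (mult (reg 90) (reg 91)))
     loop body:  (set (reg 100) (reg 117))

   while the uses of reg 100 known to be dominated by the definition are
   rewritten to use reg 117 directly.  */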
static void
move_invariant_reg (struct loop *loop, unsigned invno)
{
  struct invariant *inv = VEC_index (invariant_p, invariants, invno);
  struct invariant *repr = VEC_index (invariant_p, invariants, inv->eqto);
  unsigned i;
  basic_block preheader = loop_preheader_edge (loop)->src;
  rtx reg, set;
  struct use *use;
  bitmap_iterator bi;

  if (inv->reg
      || !repr->move)
    return;

  /* If this is a representative of the class of equivalent invariants,
     really move the invariant.  Otherwise just replace its use with
     the register used for the representative.  */
  if (inv == repr)
    {
      if (inv->depends_on)
        {
          EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, i, bi)
            {
              move_invariant_reg (loop, i);
            }
        }

      /* Move the set out of the loop.  If the set is always executed (we
         could omit this condition if we know that the register is unused
         outside of the loop, but it does not seem worth finding out) and
         it has no uses that would not be dominated by it, we may just move
         it (TODO).  Otherwise we need to create a temporary register.  */
      set = single_set (inv->insn);
      reg = gen_reg_rtx (GET_MODE (SET_DEST (set)));
      emit_insn_after (gen_move_insn (SET_DEST (set), reg), inv->insn);

      /* If the SET_DEST of the invariant insn is a reg, we can just move
         the insn out of the loop.  Otherwise, we have to use gen_move_insn
         to let emit_move_insn produce a valid instruction stream.  */
      if (REG_P (SET_DEST (set)))
        {
          SET_DEST (set) = reg;
          reorder_insns (inv->insn, inv->insn, BB_END (preheader));
        }
      else
        {
          emit_insn_after (gen_move_insn (reg, SET_SRC (set)),
                           BB_END (preheader));
          delete_insn (inv->insn);
        }
    }
  else
    {
      move_invariant_reg (loop, repr->invno);
      reg = repr->reg;
      set = single_set (inv->insn);
      emit_insn_after (gen_move_insn (SET_DEST (set), reg), inv->insn);
      delete_insn (inv->insn);
    }

  inv->reg = reg;

  /* Replace the uses we know to be dominated.  It saves work for copy
     propagation, and also it is necessary so that dependent invariants
     are computed right.  */
  if (inv->def)
    {
      for (use = inv->def->uses; use; use = use->next)
        *use->pos = reg;
    }
}
/* Move the selected invariants out of the LOOP.  */

static void
move_invariants (struct loop *loop)
{
  struct invariant *inv;
  unsigned i;

  for (i = 0; VEC_iterate (invariant_p, invariants, i, inv); i++)
    move_invariant_reg (loop, i);
}

/* Initializes invariant motion data.  */

static void
init_inv_motion_data (void)
{
  actual_stamp = 1;

  invariants = VEC_alloc (invariant_p, heap, 100);
}

/* Frees the data allocated by invariant motion.  */

static void
free_inv_motion_data (void)
{
  unsigned i;
  struct def *def;
  struct invariant *inv;

  for (i = 0; i < DF_DEFS_SIZE (df); i++)
    {
      struct df_ref *ref = DF_DEFS_GET (df, i);
      if (!ref)
        continue;

      inv = DF_REF_DATA (ref);
      if (!inv)
        continue;

      def = inv->def;
      gcc_assert (def != NULL);

      free_use_list (def->uses);
      free (def);
      DF_REF_DATA (ref) = NULL;
    }

  for (i = 0; VEC_iterate (invariant_p, invariants, i, inv); i++)
    {
      BITMAP_FREE (inv->depends_on);
      free (inv);
    }
  VEC_free (invariant_p, heap, invariants);
}
/* Move the invariants out of the LOOP.  */

static void
move_single_loop_invariants (struct loop *loop)
{
  init_inv_motion_data ();

  find_invariants (loop);
  find_invariants_to_move ();
  move_invariants (loop);

  free_inv_motion_data ();
}

/* Releases the auxiliary data for LOOP.  */

static void
free_loop_data (struct loop *loop)
{
  struct loop_data *data = LOOP_DATA (loop);

  free (data);
  loop->aux = NULL;
}
/* Move the invariants out of the LOOPS.  */

void
move_loop_invariants (struct loops *loops)
{
  struct loop *loop;
  unsigned i;

  df = df_init (DF_HARD_REGS | DF_EQUIV_NOTES);
  df_chain_add_problem (df, DF_UD_CHAIN);

  /* Process the loops, innermost first.  */
  loop = loops->tree_root;
  while (loop->inner)
    loop = loop->inner;

  while (loop != loops->tree_root)
    {
      move_single_loop_invariants (loop);

      if (loop->next)
        {
          loop = loop->next;
          while (loop->inner)
            loop = loop->inner;
        }
      else
        loop = loop->outer;
    }

  for (i = 1; i < loops->num; i++)
    if (loops->parray[i])
      free_loop_data (loops->parray[i]);

  df_finish (df);
  df = NULL;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}