/* RTL-level loop invariant motion.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This implements the loop invariant motion pass.  It is very simple
   (no calls, no libcalls, etc.).  This should be sufficient to clean up
   things like address arithmetic -- other more complicated invariants should
   be eliminated at the tree level, either in tree-ssa-loop-im.c or in
   tree-ssa-pre.c.

   We proceed loop by loop -- it is simpler than trying to handle things
   globally and should not lose much.  First we inspect all sets inside the
   loop and create a dependency graph on insns (saying "to move this insn,
   you must also move the following insns").

   We then need to determine what to move.  We estimate the number of
   registers used and move as many invariants as possible while we still
   have enough free registers.  We prefer the expensive invariants.

   Then we move the selected invariants out of the loop, creating new
   temporaries for them if necessary.  */
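/* As a hypothetical illustration of the kind of invariant this pass targets:
   in a loop such as

     for (i = 0; i < n; i++)
       sum += tab[off + i];

   an RTL computation of the base address of "tab[off]" that does not change
   inside the loop can be hoisted into the loop preheader, so it is computed
   once instead of on every iteration; more involved invariants are left to
   the tree-level passes named above.  */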
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
/* The data stored for the loop.  */

struct loop_data
{
  struct loop *outermost_exit;	/* The outermost exit of the loop.  */
  bool has_call;		/* True if the loop contains a call.  */
};

#define LOOP_DATA(LOOP) ((struct loop_data *) (LOOP)->aux)
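/* The structure above hangs off loop->aux; it is allocated in find_exits
   below and released again in free_loop_data at the end of the pass.  */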
/* The description of a use.  */

struct use
{
  rtx *pos;			/* Position of the use.  */
  rtx insn;			/* The insn in which the use occurs.  */

  struct use *next;		/* Next use in the list.  */
};
/* The description of a def.  */

struct def
{
  struct use *uses;		/* The list of uses that are uniquely reached
				   by it.  */
  unsigned n_uses;		/* Number of such uses.  */
  unsigned invno;		/* The corresponding invariant.  */
};
/* The data stored for each invariant.  */

struct invariant
{
  /* The number of the invariant.  */
  unsigned invno;

  /* Whether we already processed the invariant.  */
  bool processed;

  /* The definition of the invariant.  */
  struct def *def;

  /* The insn in which it is defined.  */
  rtx insn;

  /* Whether it is always executed.  */
  bool always_executed;

  /* Whether to move the invariant.  */
  bool move;

  /* Cost of the invariant.  */
  unsigned cost;

  /* The invariants it depends on.  */
  bitmap depends_on;

  /* Used for detecting already visited invariants during determining
     costs of movements.  */
  unsigned stamp;
};

/* The actual stamp for marking already visited invariants during determining
   costs of movements.  */

static unsigned actual_stamp;

/* The invariants.  */

static varray_type invariants;
/* Test for possibility of invariantness of X.  */

static bool
check_maybe_invariant (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  switch (code)
    {
    case UNSPEC_VOLATILE:
      return false;

    case MEM:
      /* Load/store motion is done elsewhere.  ??? Perhaps also add it here?
	 It should not be hard, and might be faster than "elsewhere".  */

      /* Just handle the most trivial case where we load from an unchanging
	 location (most importantly, pic tables).  */
      if (MEM_READONLY_P (x))
	break;

      return false;

    case ASM_OPERANDS:
      /* Don't mess with insns declared volatile.  */
      if (MEM_VOLATILE_P (x))
	return false;
      break;

    default:
      break;
    }

  /* Recurse on the operands of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (!check_maybe_invariant (XEXP (x, i)))
	    return false;
	}
      else if (fmt[i] == 'E')
	{
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (!check_maybe_invariant (XVECEXP (x, i, j)))
	      return false;
	}
    }

  return true;
}
/* Determines the basic blocks inside LOOP that are always executed and
   stores their bitmap to ALWAYS_REACHED.  MAY_EXIT is a bitmap of
   basic blocks that may either exit the loop, or contain a call that
   does not have to return.  BODY is the body of the loop obtained by
   get_loop_body_in_dom_order.  */

static void
compute_always_reached (struct loop *loop, basic_block *body,
			bitmap may_exit, bitmap always_reached)
{
  unsigned i;

  for (i = 0; i < loop->num_nodes; i++)
    {
      if (dominated_by_p (CDI_DOMINATORS, loop->latch, body[i]))
	bitmap_set_bit (always_reached, i);

      /* Once we reach a block that may exit the loop (or contains a call
	 that need not return), the following blocks in the dominance order
	 are no longer guaranteed to execute.  */
      if (bitmap_bit_p (may_exit, i))
	return;
    }
}
/* Finds exits out of the LOOP with body BODY.  Marks the blocks from which we
   may exit the loop by a cfg edge in HAS_EXIT and MAY_EXIT.  In MAY_EXIT,
   additionally mark the blocks that may exit due to a call.  */

static void
find_exits (struct loop *loop, basic_block *body,
	    bitmap may_exit, bitmap has_exit)
{
  unsigned i;
  edge_iterator ei;
  edge e;
  rtx insn;
  struct loop *outermost_exit = loop, *aexit;
  bool has_call = false;

  for (i = 0; i < loop->num_nodes; i++)
    {
      if (body[i]->loop_father == loop)
	{
	  FOR_BB_INSNS (body[i], insn)
	    if (CALL_P (insn)
		&& !CONST_OR_PURE_CALL_P (insn))
	      {
		has_call = true;
		bitmap_set_bit (may_exit, i);
		break;
	      }

	  FOR_EACH_EDGE (e, ei, body[i]->succs)
	    {
	      if (flow_bb_inside_loop_p (loop, e->dest))
		continue;

	      bitmap_set_bit (may_exit, i);
	      bitmap_set_bit (has_exit, i);
	      outermost_exit = find_common_loop (outermost_exit,
						 e->dest->loop_father);
	    }
	  continue;
	}

      /* Use the data stored for the subloop to decide whether we may exit
	 through it.  It is sufficient to do this for the header of the loop,
	 as other basic blocks inside it must be dominated by it.  */
      if (body[i]->loop_father->header != body[i])
	continue;

      if (LOOP_DATA (body[i]->loop_father)->has_call)
	{
	  has_call = true;
	  bitmap_set_bit (may_exit, i);
	}
      aexit = LOOP_DATA (body[i]->loop_father)->outermost_exit;
      if (aexit != loop)
	{
	  bitmap_set_bit (may_exit, i);
	  bitmap_set_bit (has_exit, i);

	  if (flow_loop_nested_p (aexit, outermost_exit))
	    outermost_exit = aexit;
	}
    }

  loop->aux = xcalloc (1, sizeof (struct loop_data));
  LOOP_DATA (loop)->outermost_exit = outermost_exit;
  LOOP_DATA (loop)->has_call = has_call;
}
/* Check whether we may assign a value to X from a register.  */

static bool
may_assign_reg_p (rtx x)
{
  return can_copy_p (GET_MODE (x));
}
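/* Note that can_copy_p only tells us whether values of the given machine mode
   can be copied between registers; if no such copy is available, the
   invariant cannot be hoisted into a fresh pseudo below.  */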
/* Finds definitions that may correspond to invariants in LOOP with body BODY.
   DF is the dataflow object.  */

static void
find_defs (struct loop *loop, basic_block *body, struct df *df)
{
  unsigned i;
  bitmap blocks = BITMAP_ALLOC (NULL);

  for (i = 0; i < loop->num_nodes; i++)
    bitmap_set_bit (blocks, body[i]->index);

  df_analyze_subcfg (df, blocks, DF_UD_CHAIN | DF_HARD_REGS | DF_EQUIV_NOTES);
  BITMAP_FREE (blocks);
}
/* Creates a new invariant for definition DEF in INSN, depending on invariants
   in DEPENDS_ON.  ALWAYS_EXECUTED is true if the insn is always executed,
   unless the program ends due to a function call.  */

static void
create_new_invariant (struct def *def, rtx insn, bitmap depends_on,
		      bool always_executed)
{
  struct invariant *inv = xmalloc (sizeof (struct invariant));
  rtx set = single_set (insn);

  inv->def = def;
  inv->always_executed = always_executed;
  inv->depends_on = depends_on;

  /* If the set is simple, usually by moving it we move the whole store out of
     the loop.  Otherwise we save only cost of the computation.  */
  if (def)
    inv->cost = rtx_cost (set, SET);
  else
    inv->cost = rtx_cost (SET_SRC (set), SET);

  inv->move = false;
  inv->processed = false;
  inv->stamp = 0;
  inv->insn = insn;

  inv->invno = VARRAY_ACTIVE_SIZE (invariants);
  if (def)
    def->invno = inv->invno;
  VARRAY_PUSH_GENERIC_PTR_NOGC (invariants, inv);

  if (dump_file)
    {
      fprintf (dump_file,
	       "Set in insn %d is invariant (%d), cost %d, depends on ",
	       INSN_UID (insn), inv->invno, inv->cost);
      dump_bitmap (dump_file, inv->depends_on);
    }
}
/* Record USE at DEF.  */

static void
record_use (struct def *def, rtx *use, rtx insn)
{
  struct use *u = xmalloc (sizeof (struct use));

  if (GET_CODE (*use) == SUBREG)
    use = &SUBREG_REG (*use);
  gcc_assert (REG_P (*use));

  u->pos = use;
  u->insn = insn;
  u->next = def->uses;
  def->uses = u;
  def->n_uses++;
}
/* Finds the invariants INSN depends on and stores them to the DEPENDS_ON
   bitmap.  DF is the dataflow object.  Returns false if INSN depends on
   something that is not an invariant defined before it in the loop.  */

static bool
check_dependencies (rtx insn, struct df *df, bitmap depends_on)
{
  struct df_link *uses, *defs;
  struct ref *use, *def;
  basic_block bb = BLOCK_FOR_INSN (insn), def_bb;
  struct def *def_data;

  for (uses = DF_INSN_USES (df, insn); uses; uses = uses->next)
    {
      use = uses->ref;

      /* The use must be reached by a single definition ...  */
      defs = DF_REF_CHAIN (use);
      if (!defs || defs->next)
	return false;

      /* ... that was itself recognized as invariant ...  */
      def = defs->ref;
      def_data = DF_REF_DATA (def);
      if (!def_data)
	return false;

      /* ... in a block dominating the use.  */
      def_bb = DF_REF_BB (def);
      if (!dominated_by_p (CDI_DOMINATORS, bb, def_bb))
	return false;

      bitmap_set_bit (depends_on, def_data->invno);
    }

  return true;
}
/* Finds invariant in INSN.  ALWAYS_REACHED is true if the insn is always
   executed.  ALWAYS_EXECUTED is true if the insn is always executed,
   unless the program ends due to a function call.  DF is the dataflow
   object.  */

static void
find_invariant_insn (rtx insn, bool always_reached, bool always_executed,
		     struct df *df)
{
  struct ref *ref;
  struct def *def;
  bitmap depends_on;
  rtx set, dest;

  /* Until we get rid of LIBCALLS.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX)
      || find_reg_note (insn, REG_LIBCALL, NULL_RTX)
      || find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
    return;

  set = single_set (insn);
  if (!set)
    return;

  dest = SET_DEST (set);

  if (GET_CODE (dest) != REG
      || HARD_REGISTER_P (dest))
    return;

  if (!check_maybe_invariant (SET_SRC (set))
      || !may_assign_reg_p (SET_DEST (set)))
    return;

  if (may_trap_p (PATTERN (insn)))
    {
      if (!always_reached)
	return;

      /* Unless the exceptions are handled, the behavior is undefined
	 if the trap occurs.  */
      if (flag_non_call_exceptions)
	return;
    }

  depends_on = BITMAP_ALLOC (NULL);
  if (!check_dependencies (insn, df, depends_on))
    {
      BITMAP_FREE (depends_on);
      return;
    }

  ref = df_find_def (df, insn, dest);
  def = xcalloc (1, sizeof (struct def));
  DF_REF_DATA (ref) = def;

  create_new_invariant (def, insn, depends_on, always_executed);
}
/* Record registers used in INSN that have a unique invariant definition.
   DF is the dataflow object.  */

static void
record_uses (rtx insn, struct df *df)
{
  struct df_link *uses, *defs;
  struct ref *use, *def;
  basic_block bb = BLOCK_FOR_INSN (insn), def_bb;

  for (uses = DF_INSN_USES (df, insn); uses; uses = uses->next)
    {
      use = uses->ref;

      defs = DF_REF_CHAIN (use);
      if (!defs || defs->next)
	continue;
      def = defs->ref;
      if (!DF_REF_DATA (def))
	continue;

      def_bb = DF_REF_BB (def);
      if (!dominated_by_p (CDI_DOMINATORS, bb, def_bb))
	continue;

      record_use (DF_REF_DATA (def), DF_REF_LOC (use), DF_REF_INSN (use));
    }
}
/* Finds invariants in INSN.  ALWAYS_REACHED is true if the insn is always
   executed.  ALWAYS_EXECUTED is true if the insn is always executed,
   unless the program ends due to a function call.  DF is the dataflow
   object.  */

static void
find_invariants_insn (rtx insn, bool always_reached, bool always_executed,
		      struct df *df)
{
  find_invariant_insn (insn, always_reached, always_executed, df);
  record_uses (insn, df);
}
/* Finds invariants in basic block BB.  ALWAYS_REACHED is true if the
   basic block is always executed.  ALWAYS_EXECUTED is true if the basic
   block is always executed, unless the program ends due to a function
   call.  DF is the dataflow object.  */

static void
find_invariants_bb (basic_block bb, bool always_reached, bool always_executed,
		    struct df *df)
{
  rtx insn;

  FOR_BB_INSNS (bb, insn)
    {
      if (!INSN_P (insn))
	continue;

      find_invariants_insn (insn, always_reached, always_executed, df);

      if (always_reached
	  && CALL_P (insn)
	  && !CONST_OR_PURE_CALL_P (insn))
	always_reached = false;
    }
}
/* Finds invariants in LOOP with body BODY.  ALWAYS_REACHED is the bitmap of
   basic blocks in BODY that are always executed.  ALWAYS_EXECUTED is the
   bitmap of basic blocks in BODY that are always executed unless the program
   ends due to a function call.  DF is the dataflow object.  */

static void
find_invariants_body (struct loop *loop, basic_block *body,
		      bitmap always_reached, bitmap always_executed,
		      struct df *df)
{
  unsigned i;

  for (i = 0; i < loop->num_nodes; i++)
    find_invariants_bb (body[i],
			bitmap_bit_p (always_reached, i),
			bitmap_bit_p (always_executed, i),
			df);
}
/* Finds invariants in LOOP.  DF is the dataflow object.  */

static void
find_invariants (struct loop *loop, struct df *df)
{
  bitmap may_exit = BITMAP_ALLOC (NULL);
  bitmap always_reached = BITMAP_ALLOC (NULL);
  bitmap has_exit = BITMAP_ALLOC (NULL);
  bitmap always_executed = BITMAP_ALLOC (NULL);
  basic_block *body = get_loop_body_in_dom_order (loop);

  find_exits (loop, body, may_exit, has_exit);
  compute_always_reached (loop, body, may_exit, always_reached);
  compute_always_reached (loop, body, has_exit, always_executed);

  find_defs (loop, body, df);
  find_invariants_body (loop, body, always_reached, always_executed, df);

  BITMAP_FREE (always_reached);
  BITMAP_FREE (always_executed);
  BITMAP_FREE (may_exit);
  BITMAP_FREE (has_exit);
  free (body);
}
/* Frees a list of uses USE.  */

static void
free_use_list (struct use *use)
{
  struct use *next;

  for (; use; use = next)
    {
      next = use->next;
      free (use);
    }
}
/* Calculates cost and number of registers needed for moving invariant INV
   out of the loop and stores them to *COMP_COST and *REGS_NEEDED.  */

static void
get_inv_cost (struct invariant *inv, int *comp_cost, unsigned *regs_needed)
{
  int acomp_cost;
  unsigned aregs_needed;
  unsigned depno;
  struct invariant *dep;
  bitmap_iterator bi;

  *comp_cost = 0;
  *regs_needed = 0;
  if (inv->move
      || inv->stamp == actual_stamp)
    return;
  inv->stamp = actual_stamp;

  (*regs_needed)++;
  (*comp_cost) += inv->cost;

  EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, depno, bi)
    {
      dep = VARRAY_GENERIC_PTR_NOGC (invariants, depno);

      get_inv_cost (dep, &acomp_cost, &aregs_needed);

      if (aregs_needed
	  /* We need to check always_executed, since if the original value of
	     the invariant may be preserved, we may need to keep it in a
	     separate register.  TODO check whether the register has an
	     use outside of the loop.  */
	  && dep->always_executed
	  && !dep->def->uses->next)
	{
	  /* If this is a single use, after moving the dependency we will not
	     need a new register.  */
	  aregs_needed--;
	}

      (*regs_needed) += aregs_needed;
      (*comp_cost) += acomp_cost;
    }
}
/* Calculates gain for eliminating invariant INV.  REGS_USED is the number
   of registers used in the loop, N_INV_USES is the number of uses of
   invariants, NEW_REGS is the number of new variables already added due to
   the invariant motion.  The number of registers needed for it is stored in
   *REGS_NEEDED.  */

static int
gain_for_invariant (struct invariant *inv, unsigned *regs_needed,
		    unsigned new_regs, unsigned regs_used, unsigned n_inv_uses)
{
  int comp_cost, size_cost;

  get_inv_cost (inv, &comp_cost, regs_needed);
  actual_stamp++;

  size_cost = (global_cost_for_size (new_regs + *regs_needed,
				     regs_used, n_inv_uses)
	       - global_cost_for_size (new_regs, regs_used, n_inv_uses));

  return comp_cost - size_cost;
}
/* Finds the invariant with the best gain for moving.  Returns the gain,
   stores the invariant in *BEST and the number of registers needed for it to
   *REGS_NEEDED.  REGS_USED is the number of registers used in the loop,
   N_INV_USES is the number of uses of invariants.  NEW_REGS is the number
   of new variables already added due to invariant motion.  */

static int
best_gain_for_invariant (struct invariant **best, unsigned *regs_needed,
			 unsigned new_regs, unsigned regs_used,
			 unsigned n_inv_uses)
{
  struct invariant *inv;
  int gain = 0, again;
  unsigned aregs_needed, invno;

  for (invno = 0; invno < VARRAY_ACTIVE_SIZE (invariants); invno++)
    {
      inv = VARRAY_GENERIC_PTR_NOGC (invariants, invno);
      if (inv->move)
	continue;

      again = gain_for_invariant (inv, &aregs_needed,
				  new_regs, regs_used, n_inv_uses);
      if (again > gain)
	{
	  gain = again;
	  *best = inv;
	  *regs_needed = aregs_needed;
	}
    }

  return gain;
}
/* Marks invariant INVNO and all its dependencies for moving.  */

static void
set_move_mark (unsigned invno)
{
  struct invariant *inv = VARRAY_GENERIC_PTR_NOGC (invariants, invno);
  bitmap_iterator bi;

  if (inv->move)
    return;
  inv->move = true;

  if (dump_file)
    fprintf (dump_file, "Decided to move invariant %d\n", invno);

  EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, invno, bi)
    {
      set_move_mark (invno);
    }
}
/* Determines which invariants to move.  DF is the dataflow object.  */

static void
find_invariants_to_move (struct df *df)
{
  unsigned i, regs_used, n_inv_uses, regs_needed = 0, new_regs;
  struct invariant *inv = NULL;

  if (!VARRAY_ACTIVE_SIZE (invariants))
    return;

  /* Now something slightly more involved.  First estimate the number of used
     registers.  */

  /* We do not really do a good job in this estimation; put some initial bound
     here to stand for induction variables etc. that we do not detect.  */
  regs_used = 2;

  for (i = 0; i < df->n_regs; i++)
    {
      if (!DF_REGNO_FIRST_DEF (df, i) && DF_REGNO_LAST_USE (df, i))
	{
	  /* This is a value that is used but not changed inside loop.  */
	  regs_used++;
	}
    }

  n_inv_uses = 0;
  for (i = 0; i < VARRAY_ACTIVE_SIZE (invariants); i++)
    {
      inv = VARRAY_GENERIC_PTR_NOGC (invariants, i);
      if (inv->def)
	n_inv_uses += inv->def->n_uses;
    }

  /* Greedily pick the invariant with the best gain until moving any further
     invariant would not pay off.  */
  new_regs = 0;
  while (best_gain_for_invariant (&inv, &regs_needed,
				  new_regs, regs_used, n_inv_uses) > 0)
    {
      set_move_mark (inv->invno);
      new_regs += regs_needed;
    }
}
/* Move invariant INVNO out of the LOOP.  DF is the dataflow object.  */

static void
move_invariant_reg (struct loop *loop, unsigned invno, struct df *df)
{
  struct invariant *inv = VARRAY_GENERIC_PTR_NOGC (invariants, invno);
  unsigned i;
  basic_block preheader = loop_preheader_edge (loop)->src;
  rtx reg, set;
  struct use *use;
  bitmap_iterator bi;

  if (inv->processed)
    return;
  inv->processed = true;

  /* First move the invariants this one depends on.  */
  EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, i, bi)
    {
      move_invariant_reg (loop, i, df);
    }

  /* Move the set out of the loop.  If the set is always executed (we could
     omit this condition if we know that the register is unused outside of the
     loop, but it does not seem worth finding out) and it has no uses that
     would not be dominated by it, we may just move it (TODO).  Otherwise we
     need to create a temporary register.  */
  set = single_set (inv->insn);
  reg = gen_reg_rtx (GET_MODE (SET_DEST (set)));
  df_pattern_emit_after (df, gen_move_insn (SET_DEST (set), reg),
			 BLOCK_FOR_INSN (inv->insn), inv->insn);
  SET_DEST (set) = reg;
  reorder_insns (inv->insn, inv->insn, BB_END (preheader));
  df_insn_modify (df, preheader, inv->insn);

  /* Replace the uses we know to be dominated.  It saves work for copy
     propagation, and also it is necessary so that dependent invariants
     are computed right.  */
  if (inv->def)
    {
      for (use = inv->def->uses; use; use = use->next)
	{
	  *use->pos = reg;
	  df_insn_modify (df, BLOCK_FOR_INSN (use->insn), use->insn);
	}
    }
}
/* Moves the selected invariants out of the LOOP.  DF is the dataflow
   object.  */

static void
move_invariants (struct loop *loop, struct df *df)
{
  struct invariant *inv;
  unsigned i;

  for (i = 0; i < VARRAY_ACTIVE_SIZE (invariants); i++)
    {
      inv = VARRAY_GENERIC_PTR_NOGC (invariants, i);
      if (inv->move)
	move_invariant_reg (loop, i, df);
    }
}
/* Initializes invariant motion data.  */

static void
init_inv_motion_data (void)
{
  actual_stamp = 1;

  if (!invariants)
    VARRAY_GENERIC_PTR_NOGC_INIT (invariants, 100, "invariants");
}
/* Frees the data allocated by invariant motion.  DF is the dataflow
   object.  */

static void
free_inv_motion_data (struct df *df)
{
  unsigned i;
  struct def *def;
  struct invariant *inv;

  for (i = 0; i < df->n_defs; i++)
    {
      if (!df->defs[i])
	continue;

      def = DF_REF_DATA (df->defs[i]);
      if (!def)
	continue;

      free_use_list (def->uses);
      free (def);
      DF_REF_DATA (df->defs[i]) = NULL;
    }

  for (i = 0; i < VARRAY_ACTIVE_SIZE (invariants); i++)
    {
      inv = VARRAY_GENERIC_PTR_NOGC (invariants, i);
      BITMAP_FREE (inv->depends_on);
      free (inv);
    }
  VARRAY_POP_ALL (invariants);
}
/* Move the invariants out of the LOOP.  DF is the dataflow object.  */

static void
move_single_loop_invariants (struct loop *loop, struct df *df)
{
  init_inv_motion_data ();

  find_invariants (loop, df);
  find_invariants_to_move (df);
  move_invariants (loop, df);

  free_inv_motion_data (df);
}
/* Releases the auxiliary data for LOOP.  */

static void
free_loop_data (struct loop *loop)
{
  struct loop_data *data = LOOP_DATA (loop);

  free (data);
  loop->aux = NULL;
}
/* Move the invariants out of the LOOPS.  */

void
move_loop_invariants (struct loops *loops)
{
  struct loop *loop;
  unsigned i;
  struct df *df = df_init ();

  /* Process the loops, innermost first.  */
  loop = loops->tree_root;
  while (loop->inner)
    loop = loop->inner;

  while (loop != loops->tree_root)
    {
      move_single_loop_invariants (loop, df);

      if (loop->next)
	{
	  loop = loop->next;
	  while (loop->inner)
	    loop = loop->inner;
	}
      else
	loop = loop->outer;
    }

  df_finish (df);

  for (i = 1; i < loops->num; i++)
    if (loops->parray[i])
      free_loop_data (loops->parray[i]);
}