/* RTL-level induction variable analysis.
   Copyright (C) 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This is just a very simplistic analysis of induction variables of the loop.
   The major use is for determining the number of iterations of a loop for
   loop unrolling, doloop optimization and branch prediction.  For this we
   are only interested in bivs and a fairly limited set of givs that are
   needed in the exit condition.  We also only compute the iv information
   on demand.

   The interesting registers are determined.  A register is interesting if

   -- it is set only in the blocks that dominate the latch of the current loop

   -- all its sets are simple -- i.e. in the form we understand.

   We also number the insns sequentially in each basic block.  For a use of an
   interesting reg, it is now easy to find a reaching definition (there may be
   only one).

   Induction variables are then analyzed simply by walking the use-def
   chains.

   Usage:

     iv_analysis_loop_init (loop);
     insn = iv_get_reaching_def (where, reg);
     if (iv_analyze (insn, reg, &iv))
       {
         ...
       }
     iv_analysis_done (); */
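/* For illustration only (hypothetical values, not taken from any particular
   insn stream): if iv_analyze succeeds for a register that starts at 0 and
   is incremented by 4 once per iteration in SImode, the resulting struct
   rtx_iv roughly describes the value "0 + 4 * iteration (in SI)", i.e.
   base 0, step 4, mode == extend_mode == SImode, delta 0, mult 1 -- the
   same shape that dump_iv_info prints below.  */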
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "cfgloop.h"
/* The insn information.  */

struct insn_info
{
  /* Id of the insn.  */
  unsigned luid;

  /* The previous definition of the register defined by the single
     set in the insn.  */
  rtx prev_def;

  /* The description of the iv.  */
  struct rtx_iv iv;
};

static struct insn_info *insn_info;

/* The last definition of register.  */

static rtx *last_def;

/* The bivs.  */

static struct rtx_iv *bivs;

/* Maximal insn number for which there is room in the insn_info array.  */

static unsigned max_insn_no;

/* Maximal register number for which there is room in the bivs and last_def
   arrays.  */

static unsigned max_reg_no;
/* Dumps information about IV to FILE.  */

extern void dump_iv_info (FILE *, struct rtx_iv *);
void
dump_iv_info (FILE *file, struct rtx_iv *iv)
{
  if (!iv->base)
    {
      fprintf (file, "not simple");
      return;
    }

  if (iv->step == const0_rtx)
    {
      fprintf (file, "invariant ");
      print_rtl (file, iv->base);
      return;
    }

  print_rtl (file, iv->base);
  fprintf (file, " + ");
  print_rtl (file, iv->step);
  fprintf (file, " * iteration");
  fprintf (file, " (in %s)", GET_MODE_NAME (iv->mode));

  if (iv->mode != iv->extend_mode)
    fprintf (file, " %s to %s",
             rtx_name[iv->extend],
             GET_MODE_NAME (iv->extend_mode));

  if (iv->mult != const1_rtx)
    {
      fprintf (file, " * ");
      print_rtl (file, iv->mult);
    }
  if (iv->delta != const0_rtx)
    {
      fprintf (file, " + ");
      print_rtl (file, iv->delta);
    }
  if (iv->first_special)
    fprintf (file, " (first special)");
}
/* Assigns luids to insns in basic block BB.  */

static void
assign_luids (basic_block bb)
{
  unsigned i = 0, uid;
  rtx insn;

  FOR_BB_INSNS (bb, insn)
    {
      uid = INSN_UID (insn);
      insn_info[uid].luid = i++;
      insn_info[uid].prev_def = NULL_RTX;
      insn_info[uid].iv.analysed = false;
    }
}
/* Generates a subreg to get the least significant part of EXPR (in mode
   INNER_MODE) to OUTER_MODE.  */

static rtx
lowpart_subreg (enum machine_mode outer_mode, rtx expr,
                enum machine_mode inner_mode)
{
  return simplify_gen_subreg (outer_mode, expr, inner_mode,
                              subreg_lowpart_offset (outer_mode, inner_mode));
}
/* Checks whether REG is a well-behaved register.  */

static bool
simple_reg_p (rtx reg)
{
  unsigned r;

  if (GET_CODE (reg) == SUBREG)
    {
      if (!subreg_lowpart_p (reg))
	return false;
      reg = SUBREG_REG (reg);
    }

  if (!REG_P (reg))
    return false;

  r = REGNO (reg);
  if (HARD_REGISTER_NUM_P (r))
    return false;

  if (GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
    return false;

  if (last_def[r] == const0_rtx)
    return false;

  return true;
}
/* Checks whether assignment LHS = RHS is simple enough for us to process.  */

static bool
simple_set_p (rtx lhs, rtx rhs)
{
  rtx op0, op1;

  if (!REG_P (lhs)
      || !simple_reg_p (lhs))
    return false;

  if (CONSTANT_P (rhs))
    return true;

  switch (GET_CODE (rhs))
    {
    case SUBREG:
    case REG:
      return simple_reg_p (rhs);

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case NEG:
      return simple_reg_p (XEXP (rhs, 0));

    case PLUS:
    case MINUS:
    case MULT:
      op0 = XEXP (rhs, 0);
      op1 = XEXP (rhs, 1);

      if (!simple_reg_p (op0)
	  && !CONSTANT_P (op0))
	return false;

      if (!simple_reg_p (op1)
	  && !CONSTANT_P (op1))
	return false;

      if (GET_CODE (rhs) == MULT
	  && !CONSTANT_P (op0)
	  && !CONSTANT_P (op1))
	return false;

      return true;

    default:
      return false;
    }
}
/* Mark single SET in INSN.  */

static rtx
mark_single_set (rtx insn, rtx set)
{
  rtx def = SET_DEST (set), src;
  unsigned regno, uid;

  src = find_reg_equal_equiv_note (insn);
  if (src)
    src = XEXP (src, 0);
  else
    src = SET_SRC (set);

  if (!simple_set_p (SET_DEST (set), src))
    return const0_rtx;

  regno = REGNO (def);
  uid = INSN_UID (insn);

  bivs[regno].analysed = false;
  insn_info[uid].prev_def = last_def[regno];
  last_def[regno] = insn;

  return def;
}
/* Invalidate register REG unless it is equal to EXCEPT.  */

static void
kill_sets (rtx reg, rtx by ATTRIBUTE_UNUSED, void *except)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  if (!REG_P (reg))
    return;
  if (reg == except)
    return;

  last_def[REGNO (reg)] = const0_rtx;
}
/* Marks sets in basic block BB.  If DOM is true, BB dominates the loop
   latch.  */

static void
mark_sets (basic_block bb, bool dom)
{
  rtx insn, set, def;

  FOR_BB_INSNS (bb, insn)
    {
      if (!INSN_P (insn))
	continue;

      if (dom
	  && (set = single_set (insn)))
	def = mark_single_set (insn, set);
      else
	def = const0_rtx;

      note_stores (PATTERN (insn), kill_sets, def);
    }
}
/* Prepare the data for an induction variable analysis of a LOOP.  */

void
iv_analysis_loop_init (struct loop *loop)
{
  basic_block *body = get_loop_body_in_dom_order (loop);
  unsigned b;

  if ((unsigned) get_max_uid () >= max_insn_no)
    {
      /* Add some reserve for insns and registers produced in optimizations.  */
      max_insn_no = get_max_uid () + 100;
      if (insn_info)
	free (insn_info);
      insn_info = xmalloc (max_insn_no * sizeof (struct insn_info));
    }

  if ((unsigned) max_reg_num () >= max_reg_no)
    {
      max_reg_no = max_reg_num () + 100;
      if (last_def)
	free (last_def);
      last_def = xmalloc (max_reg_no * sizeof (rtx));
      if (bivs)
	free (bivs);
      bivs = xmalloc (max_reg_no * sizeof (struct rtx_iv));
    }

  memset (last_def, 0, max_reg_num () * sizeof (rtx));

  for (b = 0; b < loop->num_nodes; b++)
    {
      assign_luids (body[b]);
      mark_sets (body[b], just_once_each_iteration_p (loop, body[b]));
    }

  free (body);
}
/* Gets definition of REG reaching the INSN.  If REG is not simple, const0_rtx
   is returned.  If INSN is before the first def in the loop, NULL_RTX is
   returned.  */

rtx
iv_get_reaching_def (rtx insn, rtx reg)
{
  unsigned regno, luid, auid;
  rtx ainsn;
  basic_block bb, abb;

  if (GET_CODE (reg) == SUBREG)
    {
      if (!subreg_lowpart_p (reg))
	return const0_rtx;
      reg = SUBREG_REG (reg);
    }
  if (!REG_P (reg))
    return NULL_RTX;

  regno = REGNO (reg);
  if (!last_def[regno]
      || last_def[regno] == const0_rtx)
    return last_def[regno];

  bb = BLOCK_FOR_INSN (insn);
  luid = insn_info[INSN_UID (insn)].luid;

  ainsn = last_def[regno];
  while (1)
    {
      abb = BLOCK_FOR_INSN (ainsn);

      if (dominated_by_p (CDI_DOMINATORS, bb, abb))
	break;

      auid = INSN_UID (ainsn);
      ainsn = insn_info[auid].prev_def;

      if (!ainsn)
	return NULL_RTX;
    }

  while (1)
    {
      abb = BLOCK_FOR_INSN (ainsn);
      if (abb != bb)
	return ainsn;

      auid = INSN_UID (ainsn);
      if (luid > insn_info[auid].luid)
	return ainsn;

      ainsn = insn_info[auid].prev_def;
      if (!ainsn)
	return NULL_RTX;
    }
}
/* Sets IV to invariant CST in MODE.  Always returns true (just for
   consistency with other iv manipulation functions that may fail).  */

static bool
iv_constant (struct rtx_iv *iv, rtx cst, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (cst);

  iv->mode = mode;
  iv->base = cst;
  iv->step = const0_rtx;
  iv->first_special = false;
  iv->extend = NIL;
  iv->extend_mode = iv->mode;
  iv->delta = const0_rtx;
  iv->mult = const1_rtx;

  return true;
}
/* Evaluates application of subreg to MODE on IV.  */

static bool
iv_subreg (struct rtx_iv *iv, enum machine_mode mode)
{
  if (iv->extend_mode == mode)
    return true;

  if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (iv->mode))
    return false;

  iv->extend = NIL;
  iv->mode = mode;

  iv->base = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
				  simplify_gen_binary (MULT, iv->extend_mode,
						       iv->base, iv->mult));
  iv->step = simplify_gen_binary (MULT, iv->extend_mode, iv->step, iv->mult);
  iv->mult = const1_rtx;
  iv->delta = const0_rtx;
  iv->first_special = false;

  return true;
}
/* Evaluates application of EXTEND to MODE on IV.  */

static bool
iv_extend (struct rtx_iv *iv, enum rtx_code extend, enum machine_mode mode)
{
  if (mode != iv->extend_mode)
    return false;

  if (iv->extend != NIL
      && iv->extend != extend)
    return false;

  iv->extend = extend;

  return true;
}
/* Evaluates negation of IV.  */

static bool
iv_neg (struct rtx_iv *iv)
{
  if (iv->extend == NIL)
    {
      iv->base = simplify_gen_unary (NEG, iv->extend_mode,
				     iv->base, iv->extend_mode);
      iv->step = simplify_gen_unary (NEG, iv->extend_mode,
				     iv->step, iv->extend_mode);
    }
  else
    {
      iv->delta = simplify_gen_unary (NEG, iv->extend_mode,
				      iv->delta, iv->extend_mode);
      iv->mult = simplify_gen_unary (NEG, iv->extend_mode,
				     iv->mult, iv->extend_mode);
    }

  return true;
}
/* Evaluates addition or subtraction (according to OP) of IV1 to IV0.  */

static bool
iv_add (struct rtx_iv *iv0, struct rtx_iv *iv1, enum rtx_code op)
{
  enum machine_mode mode;
  rtx arg;

  /* Extend the constant to extend_mode of the other operand if necessary.  */
  if (iv0->extend == NIL
      && iv0->mode == iv0->extend_mode
      && iv0->step == const0_rtx
      && GET_MODE_SIZE (iv0->extend_mode) < GET_MODE_SIZE (iv1->extend_mode))
    {
      iv0->extend_mode = iv1->extend_mode;
      iv0->base = simplify_gen_unary (ZERO_EXTEND, iv0->extend_mode,
				      iv0->base, iv0->mode);
    }
  if (iv1->extend == NIL
      && iv1->mode == iv1->extend_mode
      && iv1->step == const0_rtx
      && GET_MODE_SIZE (iv1->extend_mode) < GET_MODE_SIZE (iv0->extend_mode))
    {
      iv1->extend_mode = iv0->extend_mode;
      iv1->base = simplify_gen_unary (ZERO_EXTEND, iv1->extend_mode,
				      iv1->base, iv1->mode);
    }

  mode = iv0->extend_mode;
  if (mode != iv1->extend_mode)
    return false;

  if (iv0->extend == NIL && iv1->extend == NIL)
    {
      if (iv0->mode != iv1->mode)
	return false;

      iv0->base = simplify_gen_binary (op, mode, iv0->base, iv1->base);
      iv0->step = simplify_gen_binary (op, mode, iv0->step, iv1->step);

      return true;
    }

  /* Handle addition of constant.  */
  if (iv1->extend == NIL
      && iv1->mode == mode
      && iv1->step == const0_rtx)
    {
      iv0->delta = simplify_gen_binary (op, mode, iv0->delta, iv1->base);
      return true;
    }

  if (iv0->extend == NIL
      && iv0->mode == mode
      && iv0->step == const0_rtx)
    {
      arg = iv0->base;
      *iv0 = *iv1;
      if (op == MINUS
	  && !iv_neg (iv0))
	return false;

      iv0->delta = simplify_gen_binary (PLUS, mode, iv0->delta, arg);
      return true;
    }

  return false;
}
/* Evaluates multiplication of IV by constant MBY.  */

static bool
iv_mult (struct rtx_iv *iv, rtx mby)
{
  enum machine_mode mode = iv->extend_mode;

  if (GET_MODE (mby) != VOIDmode
      && GET_MODE (mby) != mode)
    return false;

  if (iv->extend == NIL)
    {
      iv->base = simplify_gen_binary (MULT, mode, iv->base, mby);
      iv->step = simplify_gen_binary (MULT, mode, iv->step, mby);
    }
  else
    {
      iv->delta = simplify_gen_binary (MULT, mode, iv->delta, mby);
      iv->mult = simplify_gen_binary (MULT, mode, iv->mult, mby);
    }

  return true;
}
/* The recursive part of get_biv_step.  Gets the value of the single value
   defined in INSN wrto the initial value of REG inside the loop, in the
   shape described at get_biv_step.  */
595 get_biv_step_1 (rtx insn
, rtx reg
,
596 rtx
*inner_step
, enum machine_mode
*inner_mode
,
597 enum rtx_code
*extend
, enum machine_mode outer_mode
,
600 rtx set
, lhs
, rhs
, op0
= NULL_RTX
, op1
= NULL_RTX
;
601 rtx next
, nextr
, def_insn
, tmp
;
604 set
= single_set (insn
);
605 rhs
= find_reg_equal_equiv_note (insn
);
608 lhs
= SET_DEST (set
);
610 code
= GET_CODE (rhs
);
623 if (code
== PLUS
&& CONSTANT_P (op0
))
625 tmp
= op0
; op0
= op1
; op1
= tmp
;
628 if (!simple_reg_p (op0
)
629 || !CONSTANT_P (op1
))
632 if (GET_MODE (rhs
) != outer_mode
)
      /* ppc64 uses expressions like

	 (set x:SI (plus:SI (subreg:SI y:DI) 1)).

	 This is equivalent to

	 (set x':DI (plus:DI y:DI 1))
	 (set x:SI (subreg:SI x':DI)).  */
642 if (GET_CODE (op0
) != SUBREG
)
644 if (GET_MODE (SUBREG_REG (op0
)) != outer_mode
)
653 if (GET_MODE (rhs
) != outer_mode
)
657 if (!simple_reg_p (op0
))
667 if (GET_CODE (next
) == SUBREG
)
669 if (!subreg_lowpart_p (next
))
672 nextr
= SUBREG_REG (next
);
673 if (GET_MODE (nextr
) != outer_mode
)
679 def_insn
= iv_get_reaching_def (insn
, nextr
);
680 if (def_insn
== const0_rtx
)
685 if (!rtx_equal_p (nextr
, reg
))
688 *inner_step
= const0_rtx
;
690 *inner_mode
= outer_mode
;
691 *outer_step
= const0_rtx
;
693 else if (!get_biv_step_1 (def_insn
, reg
,
694 inner_step
, inner_mode
, extend
, outer_mode
,
698 if (GET_CODE (next
) == SUBREG
)
700 enum machine_mode amode
= GET_MODE (next
);
702 if (GET_MODE_SIZE (amode
) > GET_MODE_SIZE (*inner_mode
))
706 *inner_step
= simplify_gen_binary (PLUS
, outer_mode
,
707 *inner_step
, *outer_step
);
708 *outer_step
= const0_rtx
;
720 if (*inner_mode
== outer_mode
721 /* See comment in previous switch. */
722 || GET_MODE (rhs
) != outer_mode
)
723 *inner_step
= simplify_gen_binary (code
, outer_mode
,
726 *outer_step
= simplify_gen_binary (code
, outer_mode
,
732 if (GET_MODE (op0
) != *inner_mode
734 || *outer_step
!= const0_rtx
)
/* Gets the operation on register REG inside loop, in shape

   OUTER_STEP + EXTEND_{OUTER_MODE} (SUBREG_{INNER_MODE} (REG + INNER_STEP))

   If the operation cannot be described in this shape, return false.  */
754 get_biv_step (rtx reg
, rtx
*inner_step
, enum machine_mode
*inner_mode
,
755 enum rtx_code
*extend
, enum machine_mode
*outer_mode
,
758 *outer_mode
= GET_MODE (reg
);
760 if (!get_biv_step_1 (last_def
[REGNO (reg
)], reg
,
761 inner_step
, inner_mode
, extend
, *outer_mode
,
765 if (*inner_mode
!= *outer_mode
769 if (*inner_mode
== *outer_mode
773 if (*inner_mode
== *outer_mode
774 && *outer_step
!= const0_rtx
)
/* Determines whether DEF is a biv and if so, stores its description
   to *IV.  */
784 iv_analyze_biv (rtx def
, struct rtx_iv
*iv
)
787 rtx inner_step
, outer_step
;
788 enum machine_mode inner_mode
, outer_mode
;
789 enum rtx_code extend
;
793 fprintf (dump_file
, "Analysing ");
794 print_rtl (dump_file
, def
);
795 fprintf (dump_file
, " for bivness.\n");
800 if (!CONSTANT_P (def
))
803 return iv_constant (iv
, def
, VOIDmode
);
807 if (last_def
[regno
] == const0_rtx
)
810 fprintf (dump_file
, " not simple.\n");
814 if (last_def
[regno
] && bivs
[regno
].analysed
)
817 fprintf (dump_file
, " already analysed.\n");
820 return iv
->base
!= NULL_RTX
;
823 if (!last_def
[regno
])
825 iv_constant (iv
, def
, VOIDmode
);
830 if (!get_biv_step (def
, &inner_step
, &inner_mode
, &extend
,
831 &outer_mode
, &outer_step
))
  /* Loop transforms base to es (base + inner_step) + outer_step,
     where es means extend of subreg between inner_mode and outer_mode.
     The corresponding induction variable is

     es ((base - outer_step) + i * (inner_step + outer_step)) + outer_step  */
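  /* For illustration (hypothetical numbers, not derived from any particular
     insn stream): if the loop body simply does "r = r + 4" in a single mode,
     then inner_step is 4, outer_step is 0 and inner_mode == outer_mode, so
     the assignments below describe the biv as base r, step 4, delta 0 and
     mult 1.  The more involved formula above only matters when an extend of
     a subreg is involved.  */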
843 iv
->base
= simplify_gen_binary (MINUS
, outer_mode
, def
, outer_step
);
844 iv
->step
= simplify_gen_binary (PLUS
, outer_mode
, inner_step
, outer_step
);
845 iv
->mode
= inner_mode
;
846 iv
->extend_mode
= outer_mode
;
848 iv
->mult
= const1_rtx
;
849 iv
->delta
= outer_step
;
850 iv
->first_special
= inner_mode
!= outer_mode
;
855 fprintf (dump_file
, " ");
856 dump_iv_info (dump_file
, iv
);
857 fprintf (dump_file
, "\n");
862 return iv
->base
!= NULL_RTX
;
/* Analyzes operand OP of INSN and stores the result to *IV.  */

static bool
iv_analyze_op (rtx insn, rtx op, struct rtx_iv *iv)
{
  rtx def_insn;
  unsigned regno;
  bool inv = CONSTANT_P (op);

  if (dump_file)
    {
      fprintf (dump_file, "Analysing operand ");
      print_rtl (dump_file, op);
      fprintf (dump_file, " of insn ");
      print_rtl_single (dump_file, insn);
    }

  if (GET_CODE (op) == SUBREG)
    {
      if (!subreg_lowpart_p (op))
	return false;

      if (!iv_analyze_op (insn, SUBREG_REG (op), iv))
	return false;

      return iv_subreg (iv, GET_MODE (op));
    }

  if (!inv)
    {
      regno = REGNO (op);
      if (!last_def[regno])
	inv = true;
      else if (last_def[regno] == const0_rtx)
	{
	  if (dump_file)
	    fprintf (dump_file, "  not simple.\n");
	  return false;
	}
    }

  if (inv)
    {
      iv_constant (iv, op, VOIDmode);

      if (dump_file)
	{
	  fprintf (dump_file, "  ");
	  dump_iv_info (dump_file, iv);
	  fprintf (dump_file, "\n");
	}
      return true;
    }

  def_insn = iv_get_reaching_def (insn, op);
  if (def_insn == const0_rtx)
    {
      if (dump_file)
	fprintf (dump_file, "  not simple.\n");
      return false;
    }

  return iv_analyze (def_insn, op, iv);
}
930 /* Analyzes iv DEF defined in INSN and stores the result to *IV. */
933 iv_analyze (rtx insn
, rtx def
, struct rtx_iv
*iv
)
936 rtx set
, rhs
, mby
= NULL_RTX
, tmp
;
937 rtx op0
= NULL_RTX
, op1
= NULL_RTX
;
938 struct rtx_iv iv0
, iv1
;
939 enum machine_mode amode
;
942 if (insn
== const0_rtx
)
945 if (GET_CODE (def
) == SUBREG
)
947 if (!subreg_lowpart_p (def
))
950 if (!iv_analyze (insn
, SUBREG_REG (def
), iv
))
953 return iv_subreg (iv
, GET_MODE (def
));
957 return iv_analyze_biv (def
, iv
);
961 fprintf (dump_file
, "Analysing def of ");
962 print_rtl (dump_file
, def
);
963 fprintf (dump_file
, " in insn ");
964 print_rtl_single (dump_file
, insn
);
967 uid
= INSN_UID (insn
);
968 if (insn_info
[uid
].iv
.analysed
)
971 fprintf (dump_file
, " already analysed.\n");
972 *iv
= insn_info
[uid
].iv
;
973 return iv
->base
!= NULL_RTX
;
980 set
= single_set (insn
);
981 rhs
= find_reg_equal_equiv_note (insn
);
984 code
= GET_CODE (rhs
);
986 if (CONSTANT_P (rhs
))
989 amode
= GET_MODE (def
);
996 if (!subreg_lowpart_p (rhs
))
1008 op0
= XEXP (rhs
, 0);
1013 op0
= XEXP (rhs
, 0);
1014 op1
= XEXP (rhs
, 1);
1018 op0
= XEXP (rhs
, 0);
1019 mby
= XEXP (rhs
, 1);
1020 if (!CONSTANT_P (mby
))
1022 if (!CONSTANT_P (op0
))
1034 amode
= GET_MODE (rhs
);
1039 if (!iv_analyze_op (insn
, op0
, &iv0
))
1042 if (iv0
.mode
== VOIDmode
)
1045 iv0
.extend_mode
= amode
;
1051 if (!iv_analyze_op (insn
, op1
, &iv1
))
1054 if (iv1
.mode
== VOIDmode
)
1057 iv1
.extend_mode
= amode
;
1065 if (!iv_extend (&iv0
, code
, amode
))
1076 if (!iv_add (&iv0
, &iv1
, code
))
1081 if (!iv_mult (&iv0
, mby
))
1092 iv
->analysed
= true;
1093 insn_info
[uid
].iv
= *iv
;
1097 print_rtl (dump_file
, def
);
1098 fprintf (dump_file
, " in insn ");
1099 print_rtl_single (dump_file
, insn
);
1100 fprintf (dump_file
, " is ");
1101 dump_iv_info (dump_file
, iv
);
1102 fprintf (dump_file
, "\n");
1105 return iv
->base
!= NULL_RTX
;
/* Calculates value of IV at ITERATION-th iteration.  */

rtx
get_iv_value (struct rtx_iv *iv, rtx iteration)
{
  rtx val;

  /* We would need to generate some if_then_else patterns, and so far
     it is not needed anywhere.  */
  if (iv->first_special)
    abort ();

  if (iv->step != const0_rtx && iteration != const0_rtx)
    val = simplify_gen_binary (PLUS, iv->extend_mode, iv->base,
			       simplify_gen_binary (MULT, iv->extend_mode,
						    iv->step, iteration));
  else
    val = iv->base;

  if (iv->extend_mode == iv->mode)
    return val;

  val = lowpart_subreg (iv->mode, val, iv->extend_mode);

  if (iv->extend == NIL)
    return val;

  val = simplify_gen_unary (iv->extend, iv->extend_mode, val, iv->mode);
  val = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
			     simplify_gen_binary (MULT, iv->extend_mode,
						  iv->mult, val));

  return val;
}
/* Free the data for an induction variable analysis.  */

void
iv_analysis_done (void)
/* Computes inverse to X modulo (1 << MOD).  */

static unsigned HOST_WIDEST_INT
inverse (unsigned HOST_WIDEST_INT x, int mod)
{
  unsigned HOST_WIDEST_INT mask =
	  ((unsigned HOST_WIDEST_INT) 1 << (mod - 1) << 1) - 1;
  unsigned HOST_WIDEST_INT rslt = 1;
  int i;

  for (i = 0; i < mod - 1; i++)
    {
      rslt = (rslt * x) & mask;
      x = (x * x) & mask;
    }

  return rslt;
}
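/* A worked example (for illustration only): with x = 3 and mod = 4 the loop
   squares x while accumulating, so it returns 3^(2^3 - 1) & 15 = 11, and
   indeed 3 * 11 = 33 == 1 (mod 16).  In general, for odd x the result is
   x^(2^(mod-1) - 1), which is the multiplicative inverse of x modulo
   (1 << mod).  */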
1186 /* Tries to estimate the maximum number of iterations. */
1188 static unsigned HOST_WIDEST_INT
1189 determine_max_iter (struct niter_desc
*desc
)
1191 rtx niter
= desc
->niter_expr
;
1192 rtx mmin
, mmax
, left
, right
;
1193 unsigned HOST_WIDEST_INT nmax
, inc
;
1195 if (GET_CODE (niter
) == AND
1196 && GET_CODE (XEXP (niter
, 0)) == CONST_INT
)
1198 nmax
= INTVAL (XEXP (niter
, 0));
1199 if (!(nmax
& (nmax
+ 1)))
1201 desc
->niter_max
= nmax
;
1206 get_mode_bounds (desc
->mode
, desc
->signed_p
, &mmin
, &mmax
);
1207 nmax
= INTVAL (mmax
) - INTVAL (mmin
);
1209 if (GET_CODE (niter
) == UDIV
)
1211 if (GET_CODE (XEXP (niter
, 1)) != CONST_INT
)
1213 desc
->niter_max
= nmax
;
1216 inc
= INTVAL (XEXP (niter
, 1));
1217 niter
= XEXP (niter
, 0);
1222 if (GET_CODE (niter
) == PLUS
)
      left = XEXP (niter, 0);
      right = XEXP (niter, 1);
1227 if (GET_CODE (right
) == CONST_INT
)
1228 right
= GEN_INT (-INTVAL (right
));
1230 else if (GET_CODE (niter
) == MINUS
)
      left = XEXP (niter, 0);
      right = XEXP (niter, 1);
1241 if (GET_CODE (left
) == CONST_INT
)
1243 if (GET_CODE (right
) == CONST_INT
)
1245 nmax
= INTVAL (mmax
) - INTVAL (mmin
);
1247 desc
->niter_max
= nmax
/ inc
;
/* Checks whether register *REG is in set ALT.  Callback for for_each_rtx.  */

static int
altered_reg_used (rtx *reg, void *alt)
{
  if (!REG_P (*reg))
    return 0;

  return REGNO_REG_SET_P (alt, REGNO (*reg));
}

/* Marks registers altered by EXPR in set ALT.  */

static void
mark_altered (rtx expr, rtx by ATTRIBUTE_UNUSED, void *alt)
{
  if (GET_CODE (expr) == SUBREG)
    expr = SUBREG_REG (expr);
  if (!REG_P (expr))
    return;

  SET_REGNO_REG_SET (alt, REGNO (expr));
}
/* Checks whether RHS is simple enough to process.  */

static bool
simple_rhs_p (rtx rhs)
{
  rtx op0, op1;

  if (CONSTANT_P (rhs)
      || REG_P (rhs))
    return true;

  switch (GET_CODE (rhs))
    {
    case PLUS:
    case MINUS:
      op0 = XEXP (rhs, 0);
      op1 = XEXP (rhs, 1);
      /* Allow reg + const sets only.  */
      if (REG_P (op0) && CONSTANT_P (op1))
	return true;
      if (REG_P (op1) && CONSTANT_P (op0))
	return true;

      return false;

    default:
      return false;
    }
}
))
1305 /* Simplifies *EXPR using assignment in INSN. ALTERED is the set of registers
1309 simplify_using_assignment (rtx insn
, rtx
*expr
, regset altered
)
1311 rtx set
= single_set (insn
);
1317 lhs
= SET_DEST (set
);
1318 if (GET_CODE (lhs
) != REG
1319 || altered_reg_used (&lhs
, altered
))
1325 note_stores (PATTERN (insn
), mark_altered
, altered
);
1326 if (GET_CODE (insn
) == CALL_INSN
)
1330 /* Kill all call clobbered registers. */
1331 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1332 if (TEST_HARD_REG_BIT (regs_invalidated_by_call
, i
))
1333 SET_REGNO_REG_SET (altered
, i
);
1339 rhs
= find_reg_equal_equiv_note (insn
);
1341 rhs
= SET_SRC (set
);
1343 if (!simple_rhs_p (rhs
))
1346 if (for_each_rtx (&rhs
, altered_reg_used
, altered
))
1349 *expr
= simplify_replace_rtx (*expr
, lhs
, rhs
);
/* Checks whether A implies B.  */

static bool
implies_p (rtx a, rtx b)
{
  rtx op0, op1, r;

  if (GET_CODE (a) == EQ)
    {
      op0 = XEXP (a, 0);
      op1 = XEXP (a, 1);

      if (REG_P (op0))
	{
	  r = simplify_replace_rtx (b, op0, op1);
	  if (r == const_true_rtx)
	    return true;
	}

      if (REG_P (op1))
	{
	  r = simplify_replace_rtx (b, op1, op0);
	  if (r == const_true_rtx)
	    return true;
	}
    }

  return false;
}
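/* For example (a hypothetical illustration, assuming simplify_replace_rtx
   folds the resulting constant comparison): if A is (eq x 0) and B is
   (leu x (const_int 10)), substituting 0 for x in B yields a comparison of
   constants that simplifies to const_true_rtx, so A is known to imply B.  */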
/* Canonicalizes COND so that

   (1) Operands are ordered according to swap_commutative_operands_p.
   (2) (LE x const) will be replaced with (LT x <const+1>) and similarly
       for GE, GEU, and LEU.  */
1390 canon_condition (rtx cond
)
1395 enum machine_mode mode
;
1397 code
= GET_CODE (cond
);
1398 op0
= XEXP (cond
, 0);
1399 op1
= XEXP (cond
, 1);
1401 if (swap_commutative_operands_p (op0
, op1
))
1403 code
= swap_condition (code
);
1409 mode
= GET_MODE (op0
);
1410 if (mode
== VOIDmode
)
1411 mode
= GET_MODE (op1
);
1412 if (mode
== VOIDmode
)
1415 if (GET_CODE (op1
) == CONST_INT
1416 && GET_MODE_CLASS (mode
) != MODE_CC
1417 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1419 HOST_WIDE_INT const_val
= INTVAL (op1
);
1420 unsigned HOST_WIDE_INT uconst_val
= const_val
;
1421 unsigned HOST_WIDE_INT max_val
1422 = (unsigned HOST_WIDE_INT
) GET_MODE_MASK (mode
);
1427 if ((unsigned HOST_WIDE_INT
) const_val
!= max_val
>> 1)
1428 code
= LT
, op1
= gen_int_mode (const_val
+ 1, GET_MODE (op0
));
1431 /* When cross-compiling, const_val might be sign-extended from
1432 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
1434 if ((HOST_WIDE_INT
) (const_val
& max_val
)
1435 != (((HOST_WIDE_INT
) 1
1436 << (GET_MODE_BITSIZE (GET_MODE (op0
)) - 1))))
1437 code
= GT
, op1
= gen_int_mode (const_val
- 1, mode
);
1441 if (uconst_val
< max_val
)
1442 code
= LTU
, op1
= gen_int_mode (uconst_val
+ 1, mode
);
1446 if (uconst_val
!= 0)
1447 code
= GTU
, op1
= gen_int_mode (uconst_val
- 1, mode
);
1455 if (op0
!= XEXP (cond
, 0)
1456 || op1
!= XEXP (cond
, 1)
1457 || code
!= GET_CODE (cond
)
1458 || GET_MODE (cond
) != SImode
)
1459 cond
= gen_rtx_fmt_ee (code
, SImode
, op0
, op1
);
1464 /* Tries to use the fact that COND holds to simplify EXPR. ALTERED is the
1465 set of altered regs. */
1468 simplify_using_condition (rtx cond
, rtx
*expr
, regset altered
)
1470 rtx rev
, reve
, exp
= *expr
;
1472 if (!COMPARISON_P (exp
))
1475 /* If some register gets altered later, we do not really speak about its
1476 value at the time of comparison. */
1478 && for_each_rtx (&cond
, altered_reg_used
, altered
))
1481 rev
= reversed_condition (cond
);
1482 reve
= reversed_condition (exp
);
1484 cond
= canon_condition (cond
);
1485 exp
= canon_condition (exp
);
1487 rev
= canon_condition (rev
);
1489 reve
= canon_condition (reve
);
1491 if (rtx_equal_p (exp
, cond
))
1493 *expr
= const_true_rtx
;
1498 if (rev
&& rtx_equal_p (exp
, rev
))
1504 if (implies_p (cond
, exp
))
1506 *expr
= const_true_rtx
;
1510 if (reve
&& implies_p (cond
, reve
))
  /* A proof by contradiction.  If *EXPR implies (not cond), *EXPR must
     be false.  */
1518 if (rev
&& implies_p (exp
, rev
))
1524 /* Similarly, If (not *EXPR) implies (not cond), *EXPR must be true. */
1525 if (rev
&& reve
&& implies_p (reve
, rev
))
1527 *expr
= const_true_rtx
;
1531 /* We would like to have some other tests here. TODO. */
1536 /* Use relationship between A and *B to eventually eliminate *B.
1537 OP is the operation we consider. */
1540 eliminate_implied_condition (enum rtx_code op
, rtx a
, rtx
*b
)
1544 /* If A implies *B, we may replace *B by true. */
1545 if (implies_p (a
, *b
))
1546 *b
= const_true_rtx
;
1550 /* If *B implies A, we may replace *B by false. */
1551 if (implies_p (*b
, a
))
1558 /* Eliminates the conditions in TAIL that are implied by HEAD. OP is the
1559 operation we consider. */
1562 eliminate_implied_conditions (enum rtx_code op
, rtx
*head
, rtx tail
)
1566 for (elt
= tail
; elt
; elt
= XEXP (elt
, 1))
1567 eliminate_implied_condition (op
, *head
, &XEXP (elt
, 0));
1568 for (elt
= tail
; elt
; elt
= XEXP (elt
, 1))
1569 eliminate_implied_condition (op
, XEXP (elt
, 0), head
);
1572 /* Simplifies *EXPR using initial values at the start of the LOOP. If *EXPR
1573 is a list, its elements are assumed to be combined using OP. */
1576 simplify_using_initial_values (struct loop
*loop
, enum rtx_code op
, rtx
*expr
)
1578 rtx head
, tail
, insn
;
1581 regset_head altered_head
;
1587 if (CONSTANT_P (*expr
))
1590 if (GET_CODE (*expr
) == EXPR_LIST
)
1592 head
= XEXP (*expr
, 0);
1593 tail
= XEXP (*expr
, 1);
1595 eliminate_implied_conditions (op
, &head
, tail
);
1599 neutral
= const_true_rtx
;
1604 neutral
= const0_rtx
;
1605 aggr
= const_true_rtx
;
1610 simplify_using_initial_values (loop
, NIL
, &head
);
1613 XEXP (*expr
, 0) = aggr
;
1614 XEXP (*expr
, 1) = NULL_RTX
;
1617 else if (head
== neutral
)
1620 simplify_using_initial_values (loop
, op
, expr
);
1623 simplify_using_initial_values (loop
, op
, &tail
);
1625 if (tail
&& XEXP (tail
, 0) == aggr
)
1631 XEXP (*expr
, 0) = head
;
1632 XEXP (*expr
, 1) = tail
;
1639 e
= loop_preheader_edge (loop
);
1640 if (e
->src
== ENTRY_BLOCK_PTR
)
1643 altered
= INITIALIZE_REG_SET (altered_head
);
1647 insn
= BB_END (e
->src
);
1648 if (any_condjump_p (insn
))
1650 /* FIXME -- slightly wrong -- what if compared register
1651 gets altered between start of the condition and insn? */
1652 rtx cond
= get_condition (BB_END (e
->src
), NULL
, false);
1654 if (cond
&& (e
->flags
& EDGE_FALLTHRU
))
1655 cond
= reversed_condition (cond
);
1658 simplify_using_condition (cond
, expr
, altered
);
1659 if (CONSTANT_P (*expr
))
1661 FREE_REG_SET (altered
);
1667 FOR_BB_INSNS_REVERSE (e
->src
, insn
)
1672 simplify_using_assignment (insn
, expr
, altered
);
1673 if (CONSTANT_P (*expr
))
1675 FREE_REG_SET (altered
);
1682 || e
->src
== ENTRY_BLOCK_PTR
)
1686 FREE_REG_SET (altered
);
1689 /* Transforms invariant IV into MODE. Adds assumptions based on the fact
1690 that IV occurs as left operands of comparison COND and its signedness
1691 is SIGNED_P to DESC. */
1694 shorten_into_mode (struct rtx_iv
*iv
, enum machine_mode mode
,
1695 enum rtx_code cond
, bool signed_p
, struct niter_desc
*desc
)
1697 rtx mmin
, mmax
, cond_over
, cond_under
;
1699 get_mode_bounds (mode
, signed_p
, &mmin
, &mmax
);
1700 cond_under
= simplify_gen_relational (LT
, SImode
, iv
->extend_mode
,
1702 cond_over
= simplify_gen_relational (GT
, SImode
, iv
->extend_mode
,
1711 if (cond_under
!= const0_rtx
)
1713 alloc_EXPR_LIST (0, cond_under
, desc
->infinite
);
1714 if (cond_over
!= const0_rtx
)
1715 desc
->noloop_assumptions
=
1716 alloc_EXPR_LIST (0, cond_over
, desc
->noloop_assumptions
);
1723 if (cond_over
!= const0_rtx
)
1725 alloc_EXPR_LIST (0, cond_over
, desc
->infinite
);
1726 if (cond_under
!= const0_rtx
)
1727 desc
->noloop_assumptions
=
1728 alloc_EXPR_LIST (0, cond_under
, desc
->noloop_assumptions
);
1732 if (cond_over
!= const0_rtx
)
1734 alloc_EXPR_LIST (0, cond_over
, desc
->infinite
);
1735 if (cond_under
!= const0_rtx
)
1737 alloc_EXPR_LIST (0, cond_under
, desc
->infinite
);
1745 iv
->extend
= signed_p
? SIGN_EXTEND
: ZERO_EXTEND
;
1748 /* Transforms IV0 and IV1 compared by COND so that they are both compared as
1749 subregs of the same mode if possible (sometimes it is necessary to add
1750 some assumptions to DESC). */
1753 canonicalize_iv_subregs (struct rtx_iv
*iv0
, struct rtx_iv
*iv1
,
1754 enum rtx_code cond
, struct niter_desc
*desc
)
1756 enum machine_mode comp_mode
;
1759 /* If the ivs behave specially in the first iteration, or are
1760 added/multiplied after extending, we ignore them. */
1761 if (iv0
->first_special
|| iv0
->mult
!= const1_rtx
|| iv0
->delta
!= const0_rtx
)
1763 if (iv1
->first_special
|| iv1
->mult
!= const1_rtx
|| iv1
->delta
!= const0_rtx
)
1766 /* If there is some extend, it must match signedness of the comparison. */
1771 if (iv0
->extend
== ZERO_EXTEND
1772 || iv1
->extend
== ZERO_EXTEND
)
1779 if (iv0
->extend
== SIGN_EXTEND
1780 || iv1
->extend
== SIGN_EXTEND
)
1786 if (iv0
->extend
!= NIL
1787 && iv1
->extend
!= NIL
1788 && iv0
->extend
!= iv1
->extend
)
1792 if (iv0
->extend
!= NIL
)
1793 signed_p
= iv0
->extend
== SIGN_EXTEND
;
1794 if (iv1
->extend
!= NIL
)
1795 signed_p
= iv1
->extend
== SIGN_EXTEND
;
  /* Values of both variables should be computed in the same mode.  These
     might indeed be different, if we have comparison like

     (compare (subreg:SI (iv0)) (subreg:SI (iv1)))

     and iv0 and iv1 are both ivs iterating in SI mode, but calculated
     in different modes.  This does not seem impossible to handle, but
     it hardly ever occurs in practice.

     The only exception is the case when one of operands is invariant.
     For example pentium 3 generates comparisons like
     (lt (subreg:HI (reg:SI)) 100).  Here we assign HImode to 100, but we
     definitely do not want this to prevent the optimization.  */
1815 comp_mode
= iv0
->extend_mode
;
1816 if (GET_MODE_BITSIZE (comp_mode
) < GET_MODE_BITSIZE (iv1
->extend_mode
))
1817 comp_mode
= iv1
->extend_mode
;
1819 if (iv0
->extend_mode
!= comp_mode
)
1821 if (iv0
->mode
!= iv0
->extend_mode
1822 || iv0
->step
!= const0_rtx
)
1825 iv0
->base
= simplify_gen_unary (signed_p
? SIGN_EXTEND
: ZERO_EXTEND
,
1826 comp_mode
, iv0
->base
, iv0
->mode
);
1827 iv0
->extend_mode
= comp_mode
;
1830 if (iv1
->extend_mode
!= comp_mode
)
1832 if (iv1
->mode
!= iv1
->extend_mode
1833 || iv1
->step
!= const0_rtx
)
1836 iv1
->base
= simplify_gen_unary (signed_p
? SIGN_EXTEND
: ZERO_EXTEND
,
1837 comp_mode
, iv1
->base
, iv1
->mode
);
1838 iv1
->extend_mode
= comp_mode
;
1841 /* Check that both ivs belong to a range of a single mode. If one of the
1842 operands is an invariant, we may need to shorten it into the common
1844 if (iv0
->mode
== iv0
->extend_mode
1845 && iv0
->step
== const0_rtx
1846 && iv0
->mode
!= iv1
->mode
)
1847 shorten_into_mode (iv0
, iv1
->mode
, cond
, signed_p
, desc
);
1849 if (iv1
->mode
== iv1
->extend_mode
1850 && iv1
->step
== const0_rtx
1851 && iv0
->mode
!= iv1
->mode
)
1852 shorten_into_mode (iv1
, iv0
->mode
, swap_condition (cond
), signed_p
, desc
);
1854 if (iv0
->mode
!= iv1
->mode
)
1857 desc
->mode
= iv0
->mode
;
1858 desc
->signed_p
= signed_p
;
1863 /* Computes number of iterations of the CONDITION in INSN in LOOP and stores
1864 the result into DESC. Very similar to determine_number_of_iterations
1865 (basically its rtl version), complicated by things like subregs. */
1868 iv_number_of_iterations (struct loop
*loop
, rtx insn
, rtx condition
,
1869 struct niter_desc
*desc
)
1871 rtx op0
, op1
, delta
, step
, bound
, may_xform
, def_insn
, tmp
, tmp0
, tmp1
;
1872 struct rtx_iv iv0
, iv1
, tmp_iv
;
1875 enum machine_mode mode
, comp_mode
;
1877 unsigned HOST_WIDEST_INT s
, size
, d
;
1878 HOST_WIDEST_INT up
, down
, inc
;
1879 int was_sharp
= false;
  /* The meaning of these assumptions is this:
     if !assumptions
       then the rest of information does not have to be valid
     if noloop_assumptions then the loop does not roll
     if infinite then this exit is never used */
1887 desc
->assumptions
= NULL_RTX
;
1888 desc
->noloop_assumptions
= NULL_RTX
;
1889 desc
->infinite
= NULL_RTX
;
1890 desc
->simple_p
= true;
1892 desc
->const_iter
= false;
1893 desc
->niter_expr
= NULL_RTX
;
1894 desc
->niter_max
= 0;
1896 cond
= GET_CODE (condition
);
1897 if (!COMPARISON_P (condition
))
1900 mode
= GET_MODE (XEXP (condition
, 0));
1901 if (mode
== VOIDmode
)
1902 mode
= GET_MODE (XEXP (condition
, 1));
1903 /* The constant comparisons should be folded. */
1904 if (mode
== VOIDmode
)
1907 /* We only handle integers or pointers. */
1908 if (GET_MODE_CLASS (mode
) != MODE_INT
1909 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1912 op0
= XEXP (condition
, 0);
1913 def_insn
= iv_get_reaching_def (insn
, op0
);
1914 if (!iv_analyze (def_insn
, op0
, &iv0
))
1916 if (iv0
.extend_mode
== VOIDmode
)
1917 iv0
.mode
= iv0
.extend_mode
= mode
;
1919 op1
= XEXP (condition
, 1);
1920 def_insn
= iv_get_reaching_def (insn
, op1
);
1921 if (!iv_analyze (def_insn
, op1
, &iv1
))
1923 if (iv1
.extend_mode
== VOIDmode
)
1924 iv1
.mode
= iv1
.extend_mode
= mode
;
1926 if (GET_MODE_BITSIZE (iv0
.extend_mode
) > HOST_BITS_PER_WIDE_INT
1927 || GET_MODE_BITSIZE (iv1
.extend_mode
) > HOST_BITS_PER_WIDE_INT
)
1930 /* Check condition and normalize it. */
1938 tmp_iv
= iv0
; iv0
= iv1
; iv1
= tmp_iv
;
1939 cond
= swap_condition (cond
);
1951 /* Handle extends. This is relatively nontrivial, so we only try in some
1952 easy cases, when we can canonicalize the ivs (possibly by adding some
1953 assumptions) to shape subreg (base + i * step). This function also fills
1954 in desc->mode and desc->signed_p. */
1956 if (!canonicalize_iv_subregs (&iv0
, &iv1
, cond
, desc
))
1959 comp_mode
= iv0
.extend_mode
;
1961 size
= GET_MODE_BITSIZE (mode
);
1962 get_mode_bounds (mode
, (cond
== LE
|| cond
== LT
), &mmin
, &mmax
);
1964 if (GET_CODE (iv0
.step
) != CONST_INT
|| GET_CODE (iv1
.step
) != CONST_INT
)
  /* We can take care of the case of two induction variables chasing each other
     if the test is NE.  I have never seen a loop using it, but still it is
     cool.  */
1970 if (iv0
.step
!= const0_rtx
&& iv1
.step
!= const0_rtx
)
1975 iv0
.step
= simplify_gen_binary (MINUS
, comp_mode
, iv0
.step
, iv1
.step
);
1976 iv1
.step
= const0_rtx
;
1979 /* This is either infinite loop or the one that ends immediately, depending
1980 on initial values. Unswitching should remove this kind of conditions. */
1981 if (iv0
.step
== const0_rtx
&& iv1
.step
== const0_rtx
)
1984 /* Ignore loops of while (i-- < 10) type. */
1986 && (INTVAL (iv0
.step
) < 0 || INTVAL (iv1
.step
) > 0))
1989 /* Some more condition normalization. We must record some assumptions
1990 due to overflows. */
1995 /* We want to take care only of non-sharp relationals; this is easy,
1996 as in cases the overflow would make the transformation unsafe
1997 the loop does not roll. Seemingly it would make more sense to want
1998 to take care of sharp relationals instead, as NE is more similar to
1999 them, but the problem is that here the transformation would be more
2000 difficult due to possibly infinite loops. */
2001 if (iv0
.step
== const0_rtx
)
2003 tmp
= lowpart_subreg (mode
, iv0
.base
, comp_mode
);
2004 assumption
= simplify_gen_relational (EQ
, SImode
, mode
, tmp
, mmax
);
2005 if (assumption
== const_true_rtx
)
2007 iv0
.base
= simplify_gen_binary (PLUS
, comp_mode
,
2008 iv0
.base
, const1_rtx
);
2012 tmp
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2013 assumption
= simplify_gen_relational (EQ
, SImode
, mode
, tmp
, mmin
);
2014 if (assumption
== const_true_rtx
)
2016 iv1
.base
= simplify_gen_binary (PLUS
, comp_mode
,
2017 iv1
.base
, constm1_rtx
);
2020 if (assumption
!= const0_rtx
)
2021 desc
->noloop_assumptions
=
2022 alloc_EXPR_LIST (0, assumption
, desc
->noloop_assumptions
);
2023 cond
= (cond
== LT
) ? LE
: LEU
;
2025 /* It will be useful to be able to tell the difference once more in
2026 LE -> NE reduction. */
2032 /* Take care of trivially infinite loops. */
2035 if (iv0
.step
== const0_rtx
)
2037 tmp
= lowpart_subreg (mode
, iv0
.base
, comp_mode
);
2038 if (rtx_equal_p (tmp
, mmin
))
2041 alloc_EXPR_LIST (0, const_true_rtx
, NULL_RTX
);
2047 tmp
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2048 if (rtx_equal_p (tmp
, mmax
))
2051 alloc_EXPR_LIST (0, const_true_rtx
, NULL_RTX
);
2057 /* If we can we want to take care of NE conditions instead of size
2058 comparisons, as they are much more friendly (most importantly
2059 this takes care of special handling of loops with step 1). We can
2060 do it if we first check that upper bound is greater or equal to
2061 lower bound, their difference is constant c modulo step and that
2062 there is not an overflow. */
2065 if (iv0
.step
== const0_rtx
)
2066 step
= simplify_gen_unary (NEG
, comp_mode
, iv1
.step
, comp_mode
);
2069 delta
= simplify_gen_binary (MINUS
, comp_mode
, iv1
.base
, iv0
.base
);
2070 delta
= lowpart_subreg (mode
, delta
, comp_mode
);
2071 delta
= simplify_gen_binary (UMOD
, mode
, delta
, step
);
2072 may_xform
= const0_rtx
;
2074 if (GET_CODE (delta
) == CONST_INT
)
2076 if (was_sharp
&& INTVAL (delta
) == INTVAL (step
) - 1)
2078 /* A special case. We have transformed condition of type
2079 for (i = 0; i < 4; i += 4)
2081 for (i = 0; i <= 3; i += 4)
2082 obviously if the test for overflow during that transformation
2083 passed, we cannot overflow here. Most importantly any
2084 loop with sharp end condition and step 1 falls into this
2085 category, so handling this case specially is definitely
2086 worth the troubles. */
2087 may_xform
= const_true_rtx
;
2089 else if (iv0
.step
== const0_rtx
)
2091 bound
= simplify_gen_binary (PLUS
, comp_mode
, mmin
, step
);
2092 bound
= simplify_gen_binary (MINUS
, comp_mode
, bound
, delta
);
2093 bound
= lowpart_subreg (mode
, bound
, comp_mode
);
2094 tmp
= lowpart_subreg (mode
, iv0
.base
, comp_mode
);
2095 may_xform
= simplify_gen_relational (cond
, SImode
, mode
,
2100 bound
= simplify_gen_binary (MINUS
, comp_mode
, mmax
, step
);
2101 bound
= simplify_gen_binary (PLUS
, comp_mode
, bound
, delta
);
2102 bound
= lowpart_subreg (mode
, bound
, comp_mode
);
2103 tmp
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2104 may_xform
= simplify_gen_relational (cond
, SImode
, mode
,
2109 if (may_xform
!= const0_rtx
)
2111 /* We perform the transformation always provided that it is not
2112 completely senseless. This is OK, as we would need this assumption
2113 to determine the number of iterations anyway. */
2114 if (may_xform
!= const_true_rtx
)
2115 desc
->assumptions
= alloc_EXPR_LIST (0, may_xform
,
	  /* We are going to lose some information about upper bound on
	     number of iterations in this step, so record the information
	     here.  */
2121 inc
= INTVAL (iv0
.step
) - INTVAL (iv1
.step
);
2122 if (GET_CODE (iv1
.base
) == CONST_INT
)
2123 up
= INTVAL (iv1
.base
);
2125 up
= INTVAL (mmax
) - inc
;
2126 down
= INTVAL (GET_CODE (iv0
.base
) == CONST_INT
? iv0
.base
: mmin
);
2127 desc
->niter_max
= (up
- down
) / inc
+ 1;
2129 if (iv0
.step
== const0_rtx
)
2131 iv0
.base
= simplify_gen_binary (PLUS
, comp_mode
, iv0
.base
, delta
);
2132 iv0
.base
= simplify_gen_binary (MINUS
, comp_mode
, iv0
.base
, step
);
2136 iv1
.base
= simplify_gen_binary (MINUS
, comp_mode
, iv1
.base
, delta
);
2137 iv1
.base
= simplify_gen_binary (PLUS
, comp_mode
, iv1
.base
, step
);
2140 tmp0
= lowpart_subreg (mode
, iv0
.base
, comp_mode
);
2141 tmp1
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2142 assumption
= simplify_gen_relational (reverse_condition (cond
),
2143 SImode
, mode
, tmp0
, tmp1
);
2144 if (assumption
== const_true_rtx
)
2146 else if (assumption
!= const0_rtx
)
2147 desc
->noloop_assumptions
=
2148 alloc_EXPR_LIST (0, assumption
, desc
->noloop_assumptions
);
2153 /* Count the number of iterations. */
2156 /* Everything we do here is just arithmetics modulo size of mode. This
2157 makes us able to do more involved computations of number of iterations
2158 than in other cases. First transform the condition into shape
2159 s * i <> c, with s positive. */
2160 iv1
.base
= simplify_gen_binary (MINUS
, comp_mode
, iv1
.base
, iv0
.base
);
2161 iv0
.base
= const0_rtx
;
2162 iv0
.step
= simplify_gen_binary (MINUS
, comp_mode
, iv0
.step
, iv1
.step
);
2163 iv1
.step
= const0_rtx
;
2164 if (INTVAL (iv0
.step
) < 0)
2166 iv0
.step
= simplify_gen_unary (NEG
, comp_mode
, iv0
.step
, mode
);
2167 iv1
.base
= simplify_gen_unary (NEG
, comp_mode
, iv1
.base
, mode
);
2169 iv0
.step
= lowpart_subreg (mode
, iv0
.step
, comp_mode
);
      /* Let nsd (s, size of mode) = d.  If d does not divide c, the loop
	 is infinite.  Otherwise, the number of iterations is
	 (inverse(s/d) * (c/d)) mod (size of mode/d).  */
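      /* A worked example (purely illustrative): for a condition of the shape
	 4 * i != 12 in a 32-bit mode, d = nsd (4, 2^32) = 4, so the exit is
	 only reached if 4 divides 12; here c / d = 3, s / d = 1, and the
	 number of iterations is inverse (1) * 3 = 3 modulo 2^30.  */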
2174 s
= INTVAL (iv0
.step
); d
= 1;
2181 bound
= GEN_INT (((unsigned HOST_WIDEST_INT
) 1 << (size
- 1 ) << 1) - 1);
2183 tmp1
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2184 tmp
= simplify_gen_binary (UMOD
, mode
, tmp1
, GEN_INT (d
));
2185 assumption
= simplify_gen_relational (NE
, SImode
, mode
, tmp
, const0_rtx
);
2186 desc
->infinite
= alloc_EXPR_LIST (0, assumption
, desc
->infinite
);
2188 tmp
= simplify_gen_binary (UDIV
, mode
, tmp1
, GEN_INT (d
));
2189 tmp
= simplify_gen_binary (MULT
, mode
,
2190 tmp
, GEN_INT (inverse (s
, size
)));
2191 desc
->niter_expr
= simplify_gen_binary (AND
, mode
, tmp
, bound
);
2195 if (iv1
.step
== const0_rtx
)
2196 /* Condition in shape a + s * i <= b
2197 We must know that b + s does not overflow and a <= b + s and then we
2198 can compute number of iterations as (b + s - a) / s. (It might
2199 seem that we in fact could be more clever about testing the b + s
2200 overflow condition using some information about b - a mod s,
2201 but it was already taken into account during LE -> NE transform). */
2204 tmp0
= lowpart_subreg (mode
, iv0
.base
, comp_mode
);
2205 tmp1
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2207 bound
= simplify_gen_binary (MINUS
, mode
, mmax
, step
);
2208 assumption
= simplify_gen_relational (cond
, SImode
, mode
,
2211 alloc_EXPR_LIST (0, assumption
, desc
->assumptions
);
2213 tmp
= simplify_gen_binary (PLUS
, comp_mode
, iv1
.base
, iv0
.step
);
2214 tmp
= lowpart_subreg (mode
, tmp
, comp_mode
);
2215 assumption
= simplify_gen_relational (reverse_condition (cond
),
2216 SImode
, mode
, tmp0
, tmp
);
2218 delta
= simplify_gen_binary (PLUS
, mode
, tmp1
, step
);
2219 delta
= simplify_gen_binary (MINUS
, mode
, delta
, tmp0
);
2223 /* Condition in shape a <= b - s * i
2224 We must know that a - s does not overflow and a - s <= b and then
2225 we can again compute number of iterations as (b - (a - s)) / s. */
2226 step
= simplify_gen_unary (NEG
, mode
, iv1
.step
, mode
);
2227 tmp0
= lowpart_subreg (mode
, iv0
.base
, comp_mode
);
2228 tmp1
= lowpart_subreg (mode
, iv1
.base
, comp_mode
);
2230 bound
= simplify_gen_binary (MINUS
, mode
, mmin
, step
);
2231 assumption
= simplify_gen_relational (cond
, SImode
, mode
,
2234 alloc_EXPR_LIST (0, assumption
, desc
->assumptions
);
2236 tmp
= simplify_gen_binary (PLUS
, comp_mode
, iv0
.base
, iv1
.step
);
2237 tmp
= lowpart_subreg (mode
, tmp
, comp_mode
);
2238 assumption
= simplify_gen_relational (reverse_condition (cond
),
2241 delta
= simplify_gen_binary (MINUS
, mode
, tmp0
, step
);
2242 delta
= simplify_gen_binary (MINUS
, mode
, tmp1
, delta
);
2244 if (assumption
== const_true_rtx
)
2246 else if (assumption
!= const0_rtx
)
2247 desc
->noloop_assumptions
=
2248 alloc_EXPR_LIST (0, assumption
, desc
->noloop_assumptions
);
2249 delta
= simplify_gen_binary (UDIV
, mode
, delta
, step
);
2250 desc
->niter_expr
= delta
;
2253 simplify_using_initial_values (loop
, AND
, &desc
->assumptions
);
2254 if (desc
->assumptions
2255 && XEXP (desc
->assumptions
, 0) == const0_rtx
)
2257 simplify_using_initial_values (loop
, IOR
, &desc
->noloop_assumptions
);
2258 simplify_using_initial_values (loop
, IOR
, &desc
->infinite
);
2259 simplify_using_initial_values (loop
, NIL
, &desc
->niter_expr
);
  /* Rerun the simplification.  Consider code (created by copying loop headers)

     i = 0;

     if (0 < n)
       {
	 do
	   {
	     i++;
	   } while (i < n);
       }

     The first pass determines that i = 0, the second pass uses it to eliminate
     the noloop assumption.  */
2276 simplify_using_initial_values (loop
, AND
, &desc
->assumptions
);
2277 if (desc
->assumptions
2278 && XEXP (desc
->assumptions
, 0) == const0_rtx
)
2280 simplify_using_initial_values (loop
, IOR
, &desc
->noloop_assumptions
);
2281 simplify_using_initial_values (loop
, IOR
, &desc
->infinite
);
2282 simplify_using_initial_values (loop
, NIL
, &desc
->niter_expr
);
2284 if (GET_CODE (desc
->niter_expr
) == CONST_INT
)
2286 unsigned HOST_WIDEST_INT val
= INTVAL (desc
->niter_expr
);
2288 desc
->const_iter
= true;
2289 desc
->niter_max
= desc
->niter
= val
& GET_MODE_MASK (desc
->mode
);
2291 else if (!desc
->niter_max
)
2292 desc
->niter_max
= determine_max_iter (desc
);
2297 desc
->simple_p
= false;
2301 desc
->const_iter
= true;
2303 desc
->niter_max
= 0;
2304 desc
->niter_expr
= const0_rtx
;
2308 /* Checks whether E is a simple exit from LOOP and stores its description
2312 check_simple_exit (struct loop
*loop
, edge e
, struct niter_desc
*desc
)
2314 basic_block exit_bb
;
2319 desc
->simple_p
= false;
2321 /* It must belong directly to the loop. */
2322 if (exit_bb
->loop_father
!= loop
)
2325 /* It must be tested (at least) once during any iteration. */
2326 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit_bb
))
2329 /* It must end in a simple conditional jump. */
2330 if (!any_condjump_p (BB_END (exit_bb
)))
2340 /* Test whether the condition is suitable. */
2341 if (!(condition
= get_condition (BB_END (ei
->src
), &at
, false)))
2344 if (ei
->flags
& EDGE_FALLTHRU
)
2346 condition
= reversed_condition (condition
);
2351 /* Check that we are able to determine number of iterations and fill
2352 in information about it. */
2353 iv_number_of_iterations (loop
, at
, condition
, desc
);
2356 /* Finds a simple exit of LOOP and stores its description into DESC. */
2359 find_simple_exit (struct loop
*loop
, struct niter_desc
*desc
)
2364 struct niter_desc act
;
2367 desc
->simple_p
= false;
2368 body
= get_loop_body (loop
);
2370 for (i
= 0; i
< loop
->num_nodes
; i
++)
2372 for (e
= body
[i
]->succ
; e
; e
= e
->succ_next
)
2374 if (flow_bb_inside_loop_p (loop
, e
->dest
))
2377 check_simple_exit (loop
, e
, &act
);
2381 /* Prefer constant iterations; the less the better. */
2384 else if (!act
.const_iter
2385 || (desc
->const_iter
&& act
.niter
>= desc
->niter
))
2395 fprintf (dump_file
, "Loop %d is simple:\n", loop
->num
);
2396 fprintf (dump_file
, " simple exit %d -> %d\n",
2397 desc
->out_edge
->src
->index
,
2398 desc
->out_edge
->dest
->index
);
2399 if (desc
->assumptions
)
2401 fprintf (dump_file
, " assumptions: ");
2402 print_rtl (dump_file
, desc
->assumptions
);
2403 fprintf (dump_file
, "\n");
2405 if (desc
->noloop_assumptions
)
2407 fprintf (dump_file
, " does not roll if: ");
2408 print_rtl (dump_file
, desc
->noloop_assumptions
);
2409 fprintf (dump_file
, "\n");
2413 fprintf (dump_file
, " infinite if: ");
2414 print_rtl (dump_file
, desc
->infinite
);
2415 fprintf (dump_file
, "\n");
2418 fprintf (dump_file
, " number of iterations: ");
2419 print_rtl (dump_file
, desc
->niter_expr
);
2420 fprintf (dump_file
, "\n");
2422 fprintf (dump_file
, " upper bound: ");
2423 fprintf (dump_file
, HOST_WIDEST_INT_PRINT_DEC
, desc
->niter_max
);
2424 fprintf (dump_file
, "\n");
2427 fprintf (dump_file
, "Loop %d is not simple.\n", loop
->num
);
2433 /* Creates a simple loop description of LOOP if it was not computed
2437 get_simple_loop_desc (struct loop
*loop
)
2439 struct niter_desc
*desc
= simple_loop_desc (loop
);
2444 desc
= xmalloc (sizeof (struct niter_desc
));
2445 iv_analysis_loop_init (loop
);
2446 find_simple_exit (loop
, desc
);
2452 /* Releases simple loop description for LOOP. */
2455 free_simple_loop_desc (struct loop
*loop
)
2457 struct niter_desc
*desc
= simple_loop_desc (loop
);